[ 494.617548] env[61356]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61356) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.617936] env[61356]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61356) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.617978] env[61356]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61356) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 494.618334] env[61356]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 494.704814] env[61356]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61356) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 494.714926] env[61356]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61356) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 494.857782] env[61356]: INFO nova.virt.driver [None req-c4a05226-0f0d-488b-ba4e-a113e1782e2e None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 494.932934] env[61356]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 494.933132] env[61356]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 494.933230] env[61356]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61356) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 498.072416] env[61356]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-3e4ba101-5afe-48e0-93dc-ac08642ebd06 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.088605] env[61356]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61356) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 498.088819] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-18ae6dab-db2c-4381-87cf-8e7289d4d4b9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.114084] env[61356]: INFO oslo_vmware.api [-] Successfully established new session; session ID is de432.
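The records above show oslo.vmware opening the vCenter session that the VMwareVCDriver needs: a suds SOAP client is built, then ServiceInstance.RetrieveServiceContent and SessionManager.Login are invoked, all under the "oslo_vmware_api_lock". A minimal standalone sketch of the same call, assuming only the oslo.vmware package; the credentials are placeholders, not values from this deployment:

    # Sketch only: positional arguments are host, username, password,
    # api_retry_count and task_poll_interval; credentials are made up.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',  # host seen in the log above
        'user',                          # placeholder username
        'secret',                        # placeholder password
        10,                              # API retry count
        0.5,                             # task poll interval in seconds
        port=443)

    # RetrieveServiceContent / SessionManager.Login run inside
    # _create_session(), producing the request_handler DEBUG lines above.
    print(session.vim.service_content.about.version)  # e.g. '7.0.3'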
[ 498.114289] env[61356]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.181s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.114804] env[61356]: INFO nova.virt.vmwareapi.driver [None req-c4a05226-0f0d-488b-ba4e-a113e1782e2e None None] VMware vCenter version: 7.0.3
[ 498.118466] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12fbe7d-7a36-4157-9567-21da1327da9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.135954] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b691f0cf-4e78-4cf8-bc17-4b210d13ccda {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.142021] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534c5d80-d0b4-4b0b-acec-c8878d572f76 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.148694] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f00e15-51e8-4f2f-a90a-a6d1e6ece6dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.161760] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9600f8d8-2e9b-41af-906f-6780d9cba55b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.167482] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd99229-4c20-4766-9667-ba298f9d5d82 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.197212] env[61356]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-2a918e7f-c5ac-4e30-a4ae-4492cc9e743b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 498.201908] env[61356]: DEBUG nova.virt.vmwareapi.driver [None req-c4a05226-0f0d-488b-ba4e-a113e1782e2e None None] Extension org.openstack.compute already exists. {{(pid=61356) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 498.204573] env[61356]: INFO nova.compute.provider_config [None req-c4a05226-0f0d-488b-ba4e-a113e1782e2e None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
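Both the session setup above and the per-cell locking below go through oslo.concurrency's named locks, which emit the Acquiring / acquired / "released" DEBUG lines together with the wait and hold times. A minimal sketch of that pattern (lock name taken from the log, function body illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        # The decorator's inner() wrapper logs 'Acquiring lock ... by ...',
        # 'Lock ... acquired ... waited Ns' and '"released" ... held Ns',
        # matching lockutils.py:402/407/421 in the records above.
        pass

    create_session()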
[ 498.225765] env[61356]: DEBUG nova.context [None req-c4a05226-0f0d-488b-ba4e-a113e1782e2e None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),54575d1d-e26d-48d1-b507-79902816959d(cell1) {{(pid=61356) load_cells /opt/stack/nova/nova/context.py:464}}
[ 498.228564] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.228857] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.229795] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.230382] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Acquiring lock "54575d1d-e26d-48d1-b507-79902816959d" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 498.230639] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Lock "54575d1d-e26d-48d1-b507-79902816959d" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 498.231754] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Lock "54575d1d-e26d-48d1-b507-79902816959d" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 498.251984] env[61356]: INFO dbcounter [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Registered counter for database nova_cell0
[ 498.260186] env[61356]: INFO dbcounter [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Registered counter for database nova_cell1
[ 498.263192] env[61356]: DEBUG oslo_db.sqlalchemy.engines [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61356) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.263544] env[61356]: DEBUG oslo_db.sqlalchemy.engines [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61356) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 498.268098] env[61356]: DEBUG dbcounter [-] [61356] Writer thread running {{(pid=61356) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.268830] env[61356]: DEBUG dbcounter [-] [61356] Writer thread running {{(pid=61356) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 498.271312] env[61356]: ERROR nova.db.main.api [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.271312] env[61356]: result = function(*args, **kwargs)
[ 498.271312] env[61356]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.271312] env[61356]: return func(*args, **kwargs)
[ 498.271312] env[61356]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.271312] env[61356]: result = fn(*args, **kwargs)
[ 498.271312] env[61356]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.271312] env[61356]: return f(*args, **kwargs)
[ 498.271312] env[61356]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.271312] env[61356]: return db.service_get_minimum_version(context, binaries)
[ 498.271312] env[61356]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.271312] env[61356]: _check_db_access()
[ 498.271312] env[61356]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.271312] env[61356]: stacktrace = ''.join(traceback.format_stack())
[ 498.271312] env[61356]:
[ 498.272322] env[61356]: ERROR nova.db.main.api [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 498.272322] env[61356]: result = function(*args, **kwargs)
[ 498.272322] env[61356]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 498.272322] env[61356]: return func(*args, **kwargs)
[ 498.272322] env[61356]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 498.272322] env[61356]: result = fn(*args, **kwargs)
[ 498.272322] env[61356]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 498.272322] env[61356]: return f(*args, **kwargs)
[ 498.272322] env[61356]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 498.272322] env[61356]: return db.service_get_minimum_version(context, binaries)
[ 498.272322] env[61356]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 498.272322] env[61356]: _check_db_access()
[ 498.272322] env[61356]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 498.272322] env[61356]: stacktrace = ''.join(traceback.format_stack())
[ 498.272322] env[61356]:
[ 498.272943] env[61356]: WARNING nova.objects.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 498.272943] env[61356]: WARNING nova.objects.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Failed to get minimum service version for cell 54575d1d-e26d-48d1-b507-79902816959d
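The two ERROR blocks above are nova-compute's DB-access guard firing: the service-version lookup reached the main database API from a compute process, where direct DB access is blocked, so the guard captures and logs the offending stack instead. A rough sketch of the mechanism (simplified; not Nova's exact code):

    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISALLOW_DB_ACCESS = True  # set in nova-compute processes

    def _check_db_access():
        # Mirrors nova/db/main/api.py:_check_db_access as seen in the
        # traceback: format the current call stack and log it as an error.
        if DISALLOW_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)

    def wrapper(f):
        # Corresponds to the api.py:238 wrapper frame in the traceback above.
        def inner(*args, **kwargs):
            _check_db_access()
            return f(*args, **kwargs)
        return inner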
[ 498.273287] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Acquiring lock "singleton_lock" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 498.273449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Acquired lock "singleton_lock" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 498.273694] env[61356]: DEBUG oslo_concurrency.lockutils [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Releasing lock "singleton_lock" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 498.274015] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Full set of CONF: {{(pid=61356) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 498.274165] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ******************************************************************************** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 498.274293] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] Configuration options gathered from: {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 498.274428] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 498.274616] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 498.274742] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ================================================================================ {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 498.274954] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] allow_resize_to_same_host = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.275132] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] arq_binding_timeout = 300 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.275264] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] backdoor_port = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.275389] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] backdoor_socket = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.275556] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] block_device_allocate_retries = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.275718] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] block_device_allocate_retries_interval = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.275891] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cert = self.pem {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.276074] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.276247] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute_monitors = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.276416] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] config_dir = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.276588] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] config_drive_format = iso9660 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.276722] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.276884] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] config_source = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.277065] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] console_host = devstack {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.277237] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] control_exchange = nova {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.277399] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cpu_allocation_ratio = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.277562] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] daemon = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.277731] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] debug = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.277889] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] default_access_ip_network_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.278065] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] default_availability_zone = nova {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.278230] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] default_ephemeral_format = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.278393] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] default_green_pool_size = 1000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.278624] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.278787] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] default_schedule_zone = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.278946] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] disk_allocation_ratio = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.279120] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] enable_new_services = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.279299] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] enabled_apis = ['osapi_compute'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.279463] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] enabled_ssl_apis = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.279625] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] flat_injected = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.279784] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] force_config_drive = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.279944] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] force_raw_images = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.280127] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] graceful_shutdown_timeout = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.280295] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] heal_instance_info_cache_interval = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.280534] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] host = cpu-1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.280714] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.280878] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] initial_disk_allocation_ratio = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.281050] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] initial_ram_allocation_ratio = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.281286] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.281459] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_build_timeout = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.281623] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_delete_interval = 300 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.281792] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_format = [instance: %(uuid)s] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.281960] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_name_template = instance-%08x {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.282136] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_usage_audit = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.282310] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_usage_audit_period = month {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.282481] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.282655] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] instances_path = /opt/stack/data/nova/instances {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.282844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] internal_service_availability_zone = internal {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283017] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] key = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283186] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] live_migration_retry_count = 30 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283351] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_config_append = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283519] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283679] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_dir = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283838] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.283967] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_options = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.284144] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_rotate_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.284313] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_rotate_interval_type = days {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.284477] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] log_rotation_type = none {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.284610] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.284739] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.284911] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.285093] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.285225] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.285392] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] long_rpc_timeout = 1800 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.285558] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] max_concurrent_builds = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.285718] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] max_concurrent_live_migrations = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.285880] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] max_concurrent_snapshots = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.286051] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] max_local_block_devices = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.286216] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] max_logfile_count = 30 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.286376] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] max_logfile_size_mb = 200 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.286537] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] maximum_instance_delete_attempts = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.286703] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] metadata_listen = 0.0.0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.286869] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] metadata_listen_port = 8775 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.287046] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] metadata_workers = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.287211] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] migrate_max_retries = -1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.287378] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] mkisofs_cmd = genisoimage {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.287584] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] my_block_storage_ip = 10.180.1.21 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.287717] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] my_ip = 10.180.1.21 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.287880] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] network_allocate_retries = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.288074] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.288247] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] osapi_compute_listen = 0.0.0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.288411] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] osapi_compute_listen_port = 8774 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.288578] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] osapi_compute_unique_server_name_scope = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.288746] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] osapi_compute_workers = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.288909] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] password_length = 12 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.289086] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] periodic_enable = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.289250] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] periodic_fuzzy_delay = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.289421] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] pointer_model = usbtablet {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.289591] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] preallocate_images = none {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.289750] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] publish_errors = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.289880] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] pybasedir = /opt/stack/nova {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.290048] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ram_allocation_ratio = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.290215] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] rate_limit_burst = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.290407] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] rate_limit_except_level = CRITICAL {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.290587] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] rate_limit_interval = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.290751] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reboot_timeout = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.290910] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reclaim_instance_interval = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.291079] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] record = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.291254] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reimage_timeout_per_gb = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.291441] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] report_interval = 120 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.291613] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] rescue_timeout = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.291774] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reserved_host_cpus = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.291934] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reserved_host_disk_mb = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.292108] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reserved_host_memory_mb = 512 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.292272] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] reserved_huge_pages = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.292435] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] resize_confirm_window = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.292599] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] resize_fs_using_block_device = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.292787] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] resume_guests_state_on_host_boot = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.292962] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.293140] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] rpc_response_timeout = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.293303] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] run_external_periodic_tasks = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.293472] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] running_deleted_instance_action = reap {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.293637] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] running_deleted_instance_poll_interval = 1800 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.293796] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] running_deleted_instance_timeout = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.293953] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler_instance_sync_interval = 120 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.294131] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_down_time = 720 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.294300] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] servicegroup_driver = db {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.294460] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] shelved_offload_time = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.294621] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] shelved_poll_interval = 3600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.294786] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] shutdown_timeout = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.294943] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] source_is_ipv6 = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.295156] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ssl_only = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.295360] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.295531] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] sync_power_state_interval = 600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.295693] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] sync_power_state_pool_size = 1000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.295860] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] syslog_log_facility = LOG_USER {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296023] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] tempdir = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296188] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] timeout_nbd = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296353] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] transport_url = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296518] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] update_resources_interval = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296678] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_cow_images = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296837] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_eventlog = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.296994] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_journal = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.297164] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_json = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.297324] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_rootwrap_daemon = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.297482] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_stderr = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.297643] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] use_syslog = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.297796] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vcpu_pin_set = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.297962] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plugging_is_fatal = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.298146] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plugging_timeout = 300 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.298318] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] virt_mkfs = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.298480] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] volume_usage_poll_interval = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.298641] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] watch_log_file = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.298809] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] web = /usr/share/spice-html5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 498.298990] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_concurrency.disable_process_locking = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.299298] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.299481] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.299649] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.299820] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.299986] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.300164] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.300364] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.auth_strategy = keystone {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.300542] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.compute_link_prefix = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.300720] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.300893] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.dhcp_domain = novalocal {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.301076] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.enable_instance_password = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.301246] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.glance_link_prefix = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.301434] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.301620] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.301782] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.instance_list_per_project_cells = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.301944] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.list_records_by_skipping_down_cells = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.302123] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.local_metadata_per_cell = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.302293] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.max_limit = 1000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.302460] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.metadata_cache_expiration = 15 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.302637] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.neutron_default_tenant_id = default {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.302835] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.use_neutron_default_nets = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.303014] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.303186] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.303356] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.303533] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.303705] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_dynamic_targets = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.303875] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_jsonfile_path = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.304067] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
None None] cache.backend = dogpile.cache.memcached {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.304434] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.backend_argument = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.304609] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.config_prefix = cache.oslo {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.304780] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.dead_timeout = 60.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.304947] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.debug_cache_backend = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.305126] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.enable_retry_client = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.305293] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.enable_socket_keepalive = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.305464] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.enabled = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.305630] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.enforce_fips_mode = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.305798] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.expiration_time = 600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.305963] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.hashclient_retry_attempts = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.306146] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.hashclient_retry_delay = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.306312] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_dead_retry = 300 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.306472] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_password = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.306638] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61356) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.306803] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.306967] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_pool_maxsize = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.307144] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.307310] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_sasl_enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.307489] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.307659] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_socket_timeout = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.307821] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.memcache_username = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.307984] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.proxies = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.308159] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.redis_password = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.308331] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.redis_sentinel_service_name = mymaster {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.308507] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.308674] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.redis_server = localhost:6379 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.308837] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.redis_socket_timeout = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.308996] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.redis_username = None {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.309172] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.retry_attempts = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.309335] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.retry_delay = 0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.309500] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.socket_keepalive_count = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.309661] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.socket_keepalive_idle = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.309819] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.socket_keepalive_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.309979] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.tls_allowed_ciphers = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.310154] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.tls_cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.310314] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.tls_certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.310510] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.tls_enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.310679] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cache.tls_keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.310851] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.311036] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.auth_type = password {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.311479] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.311479] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.catalog_info = volumev3::publicURL {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.311550] env[61356]: DEBUG oslo_service.service 
[None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.311718] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.311885] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.cross_az_attach = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.312106] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.debug = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.312276] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.endpoint_template = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.312442] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.http_retries = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.312653] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.312861] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.313095] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.os_region_name = RegionOne {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.313290] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.313471] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cinder.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.313660] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.313835] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.cpu_dedicated_set = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.313998] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.cpu_shared_set = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.314192] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.image_type_exclude_list = [] {{(pid=61356) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.314382] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.314555] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.max_concurrent_disk_ops = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.314722] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.max_disk_devices_to_attach = -1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.314888] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.315085] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.315280] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.resource_provider_association_refresh = 300 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.315462] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.315673] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.shutdown_retry_interval = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.315956] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.316126] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] conductor.workers = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.316309] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] console.allowed_origins = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.316502] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] console.ssl_ciphers = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.316717] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] console.ssl_minimum_version = default {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.316968] env[61356]: DEBUG oslo_service.service [None 
req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] consoleauth.enforce_session_timeout = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.317212] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] consoleauth.token_ttl = 600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.317460] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.317663] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.317863] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.318050] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.connect_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.318240] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.connect_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.318416] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.318609] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.318810] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.319054] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.319253] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.319433] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.region_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.319632] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.319845] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.service_name = None {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.320114] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.service_type = accelerator {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.320325] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.320560] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.320748] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.320924] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.321149] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.321334] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] cyborg.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.321534] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.backend = sqlalchemy {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.321733] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.connection = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.322272] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.connection_debug = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.322272] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.connection_parameters = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.322401] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.connection_recycle_time = 3600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.322595] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.connection_trace = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.322807] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.db_inc_retry_interval = True {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.322991] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.db_max_retries = 20 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325593] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.db_max_retry_interval = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325593] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.db_retry_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325593] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.max_overflow = 50 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325593] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.max_pool_size = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325593] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.max_retries = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325593] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325772] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.mysql_wsrep_sync_wait = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325772] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.pool_timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325772] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.retry_interval = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325772] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.slave_connection = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325772] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.sqlite_synchronous = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325772] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] database.use_db_reconnect = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325928] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.backend = sqlalchemy {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.325928] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.connection = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.325928] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.connection_debug = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.326023] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.connection_parameters = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.326424] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.connection_recycle_time = 3600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.326499] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.connection_trace = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.326629] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.db_inc_retry_interval = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.326821] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.db_max_retries = 20 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.326991] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.db_max_retry_interval = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.327173] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.db_retry_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.327351] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.max_overflow = 50 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.327572] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.max_pool_size = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.327813] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.max_retries = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.327976] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.328165] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.328354] 
env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.pool_timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.328527] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.retry_interval = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.328711] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.slave_connection = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.328897] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] api_database.sqlite_synchronous = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.329127] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] devices.enabled_mdev_types = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.329360] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.329569] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ephemeral_storage_encryption.default_format = luks {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.329780] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ephemeral_storage_encryption.enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.329980] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.330196] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.api_servers = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.330420] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.330636] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.330836] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.331013] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.connect_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.331219] env[61356]: DEBUG 
oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.connect_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.331422] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.debug = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.331620] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.default_trusted_certificate_ids = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.331828] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.enable_certificate_validation = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.332072] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.enable_rbd_download = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.332289] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.332500] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.332707] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.332883] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.333081] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.333287] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.num_retries = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.333509] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.rbd_ceph_conf = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.333734] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.rbd_connect_timeout = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.333934] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.rbd_pool = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.334140] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.rbd_user = {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.334324] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.region_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.334523] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.334711] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.service_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.334946] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.service_type = image {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.335155] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.335345] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.335520] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.335678] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.335858] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.336053] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.verify_glance_signatures = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.336237] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] glance.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.336434] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] guestfs.debug = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.336649] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] mks.enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.337759] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.337759] 
env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] image_cache.manager_interval = 2400 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.337759] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] image_cache.precache_concurrency = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.337759] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] image_cache.remove_unused_base_images = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.337904] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.338057] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.338264] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] image_cache.subdirectory_name = _base {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.338465] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.api_max_retries = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.338701] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.api_retry_interval = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.338853] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.339066] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.auth_type = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.339320] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.339531] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.339754] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.340117] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.conductor_group = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.340182] env[61356]: DEBUG 
oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.connect_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.340365] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.connect_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.340570] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.340814] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.341031] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.341231] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.341432] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.341632] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.peer_list = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.341811] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.region_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.342016] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.342264] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.serial_console_state_timeout = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.342464] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.service_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.342667] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.service_type = baremetal {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.342850] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.shard = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.343043] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.split_loggers = False {{(pid=61356) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.343215] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.343380] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.343606] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.344213] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.344213] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ironic.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.344293] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.344469] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] key_manager.fixed_key = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.344715] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.344948] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.barbican_api_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.345153] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.barbican_endpoint = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.345388] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.barbican_endpoint_type = public {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.345575] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.barbican_region_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.345745] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.345942] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.certfile = None {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.346161] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.346496] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.346565] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.346765] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.number_of_retries = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.346944] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.retry_delay = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.347149] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.send_service_user_token = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.347361] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.347547] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.347740] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.verify_ssl = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.347923] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican.verify_ssl_path = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.348144] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.348334] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.auth_type = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.348524] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.348697] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.348881] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.349083] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.349302] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.349501] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.349670] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] barbican_service_user.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.349863] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.approle_role_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.350080] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.approle_secret_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.350272] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.350465] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.350741] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.350954] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.351167] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.351364] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.kv_mountpoint = secret {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.351558] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.kv_path = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.351740] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.kv_version = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.351937] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.namespace = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.352172] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.root_token_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.352363] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.352598] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.ssl_ca_crt_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.352781] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.352957] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.use_ssl = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.353172] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
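
Note: the "group.option = value" lines above and below are oslo.config's startup dump — with DEBUG logging enabled, the service iterates every registered option group and logs each value via ConfigOpts.log_opt_values (the cfg.py:2620 frames). A minimal sketch of that mechanism, using stand-in option definitions rather than Nova's real registrations:

    import logging
    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.ConfigOpts()
    # Illustrative options only; Nova registers its real vault.*/barbican_service_user.*
    # options the same way via the castellan/oslo libraries.
    CONF.register_opts(
        [cfg.StrOpt('kv_mountpoint', default='secret'),
         cfg.IntOpt('kv_version', default=2)],
        group='vault')
    CONF([])  # parse an (empty) command line and any config files
    # Emits one DEBUG line per option, e.g. "vault.kv_mountpoint = secret".
    CONF.log_opt_values(LOG, logging.DEBUG)
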
498.355505] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355505] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355505] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355505] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355505] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355769] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.region_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355769] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.355919] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.service_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.356091] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.service_type = identity {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.356262] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.356501] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.356718] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.357844] env[61356]: DEBUG oslo_service.service [None 
req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] keystone.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.connection_uri = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_mode = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_model_extra_flags = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.358137] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_models = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.358293] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_governor_high = performance {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.358457] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_governor_low = powersave {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.358645] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_management = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.358822] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.359020] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.device_detach_attempts = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.359222] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.device_detach_timeout = 20 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.359435] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.disk_cachemodes = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.359629] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.disk_prefix = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.359797] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.enabled_perf_events = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.359972] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] 
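
Note: the [keystone] group above is a standard keystoneauth1 session/adapter option set (cafile, timeout, connect_retries, service_type, valid_interfaces, ...), registered per service group. A sketch of how such a group is typically consumed — assuming the keystoneauth1 loading helpers, not Nova's exact wiring:

    from oslo_config import cfg
    from keystoneauth1 import loading as ks_loading

    CONF = cfg.ConfigOpts()
    # Register the standard session and adapter options under [keystone].
    ks_loading.register_session_conf_options(CONF, 'keystone')
    ks_loading.register_adapter_conf_options(CONF, 'keystone')
    CONF([])

    # Build a TLS-aware session and an endpoint-selecting adapter from config;
    # an auth plugin would normally be loaded and passed in as well.
    sess = ks_loading.load_session_from_conf_options(CONF, 'keystone')
    adapter = ks_loading.load_adapter_from_conf_options(CONF, 'keystone', session=sess)
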
[ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.connection_uri = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_mode = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.357844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_model_extra_flags = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.358137] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_models = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.358293] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_governor_high = performance {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.358457] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_governor_low = powersave {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.358645] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_management = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.358822] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.359020] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.device_detach_attempts = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.359222] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.device_detach_timeout = 20 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.359435] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.disk_cachemodes = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.359629] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.disk_prefix = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.359797] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.enabled_perf_events = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.359972] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.file_backed_memory = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.360156] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.gid_maps = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.360349] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.hw_disk_discard = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.360491] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.hw_machine_type = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.360668] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_rbd_ceph_conf = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.360844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.361057] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.361241] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_rbd_glance_store_name = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.361416] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_rbd_pool = rbd {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.361591] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_type = default {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.361754] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.images_volume_group = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.361920] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.inject_key = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.362097] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.inject_partition = -2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.362268] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.inject_password = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.362468] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.iscsi_iface = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.362642] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.iser_use_multipath = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.362808] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_bandwidth = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.362972] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.363152] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_downtime = 500 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.363317] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.363478] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.363643] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_inbound_addr = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.363814] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.364011] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_permit_post_copy = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.364186] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_scheme = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.364364] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_timeout_action = abort {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.364533] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_tunnelled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.364694] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_uri = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.364858] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.live_migration_with_native_tls = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.365027] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.max_queues = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.365199] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.365478] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.365650] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.nfs_mount_options = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.365978] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.366144] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.366311] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.num_iser_scan_tries = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.366475] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.num_memory_encrypted_guests = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.366643] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.366830] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.num_pcie_ports = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.367018] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.num_volume_scan_tries = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.367194] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.pmem_namespaces = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.367358] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.quobyte_client_cfg = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.367651] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.367824] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rbd_connect_timeout = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.367991] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.368183] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.368383] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rbd_secret_uuid = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.368554] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rbd_user = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.368721] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.368895] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.remote_filesystem_transport = ssh {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.369067] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rescue_image_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.369233] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rescue_kernel_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.369392] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rescue_ramdisk_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.369567] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.369745] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.rx_queue_size = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.369920] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.smbfs_mount_options = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.370217] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.370397] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.snapshot_compression = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.370560] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.snapshot_image_format = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.370781] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.370947] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.sparse_logical_volumes = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.371160] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.swtpm_enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.371346] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.swtpm_group = tss {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.371523] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.swtpm_user = tss {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.371693] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.sysinfo_serial = unique {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.371855] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.tb_cache_size = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.372023] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.tx_queue_size = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.372194] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.uid_maps = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.372359] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.use_virtio_for_bridges = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.372600] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.virt_type = kvm {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.372793] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.volume_clear = zero {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.372964] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.volume_clear_size = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.373151] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.volume_use_multipath = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.373316] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_cache_path = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.373488] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.373657] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_mount_group = qemu {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.373824] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_mount_opts = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.374040] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.374341] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.374524] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.vzstorage_mount_user = stack {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.374691] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
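
Note: once parsed, these [libvirt] values are read through typed attribute access on the config object, so consuming code gets real Python types back (str, int, list), not raw strings. A small sketch with option names taken from the dump; the defaults here merely mirror the logged values, and the authoritative definitions live in Nova's own option modules:

    from oslo_config import cfg

    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [cfg.StrOpt('virt_type', default='kvm'),
         cfg.IntOpt('wait_soft_reboot_seconds', default=120),
         cfg.ListOpt('disk_cachemodes', default=[])],
        group='libvirt')
    CONF([])

    # Typed access: 'kvm' (str), 120 (int), [] (list) as logged above.
    assert CONF.libvirt.virt_type == 'kvm'
    assert CONF.libvirt.wait_soft_reboot_seconds == 120
    assert CONF.libvirt.disk_cachemodes == []
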
[ 498.374865] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.375073] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.auth_type = password {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.375249] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.375445] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.375628] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.375792] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.connect_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.375954] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.connect_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.376145] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.default_floating_pool = public {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.376307] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.376473] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.extension_sync_interval = 600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.376638] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.http_retries = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.376821] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.377016] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.377188] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.377361] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.377532] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.377705] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.ovs_bridge = br-int {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.377871] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.physnets = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.378052] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.region_name = RegionOne {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.378244] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.378432] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.service_metadata_proxy = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.378597] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.service_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.378769] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.service_type = network {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.378932] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.379106] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.379269] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.379429] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.379611] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.379781] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] neutron.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
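
Note: neutron.metadata_proxy_shared_secret is printed as **** (as is placement.password further down) because oslo.config masks any option registered with secret=True when dumping values. A minimal sketch of that masking behaviour; the secret value here is obviously made up:

    import logging
    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [cfg.StrOpt('metadata_proxy_shared_secret',
                    secret=True,              # mask in any option dump
                    default='not-a-real-secret')],
        group='neutron')
    CONF([])
    # Logs "neutron.metadata_proxy_shared_secret = ****" -- never the value.
    CONF.log_opt_values(LOG, logging.DEBUG)
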
[ 498.379953] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] notifications.bdms_in_notifications = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.380145] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] notifications.default_level = INFO {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.380324] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] notifications.notification_format = unversioned {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.380492] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] notifications.notify_on_state_change = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.380668] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.380844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] pci.alias = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.381025] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] pci.device_spec = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.381485] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] pci.report_in_placement = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.381485] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.381595] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.auth_type = password {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.381740] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.381902] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.382081] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.382253] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.382413] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.connect_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.382644] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.connect_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.382840] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.default_domain_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383016] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.default_domain_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383186] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.domain_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383349] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.domain_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383511] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383674] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383832] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.383992] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.384207] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.384387] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.password = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.384552] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.project_domain_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.384721] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.project_domain_name = Default {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.384887] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.project_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.385071] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.project_name = service {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.385244] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.region_name = RegionOne {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.385409] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.385597] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.service_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.385800] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.service_type = placement {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.385970] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.386177] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.386312] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.386465] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.system_scope = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.386625] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.386784] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.trust_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.386946] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.user_domain_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.387158] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.user_domain_name = Default {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.387332] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.user_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.387504] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.username = placement {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.387686] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.387850] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] placement.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.388037] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.cores = 20 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.388210] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.count_usage_from_placement = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.388385] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.388591] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.injected_file_content_bytes = 10240 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.388765] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.injected_file_path_length = 255 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.388932] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.injected_files = 5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.389114] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.instances = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.389283] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.key_pairs = 100 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.389450] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.metadata_items = 128 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.389618] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.ram = 51200 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.389781] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.recheck_quota = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.389986] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.server_group_members = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.390185] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] quota.server_groups = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
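
Note: the [quota] values above bound what a project may consume in aggregate. As a rough worked example (a hypothetical helper, not Nova's enforcement code, which lives behind quota.driver = nova.quota.DbQuotaDriver): with cores = 20 and ram = 51200, ten 2-vCPU/4096 MiB instances land exactly on the core limit, while four more vCPUs per instance would be rejected:

    # Limits copied from the dump above.
    LIMITS = {'instances': 10, 'cores': 20, 'ram': 51200}

    def fits(n_instances: int, vcpus: int, ram_mb: int) -> bool:
        """Hypothetical check: would n identical instances fit the limits?"""
        usage = {'instances': n_instances,
                 'cores': n_instances * vcpus,
                 'ram': n_instances * ram_mb}
        return all(usage[k] <= LIMITS[k] for k in LIMITS)

    assert fits(10, 2, 4096)        # 20 cores, 40960 MiB: at the core limit
    assert not fits(10, 4, 4096)    # 40 cores exceeds quota.cores = 20
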
[ 498.390386] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.390536] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.390700] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.image_metadata_prefilter = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.390863] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.391038] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.max_attempts = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.391208] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.max_placement_results = 1000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.391379] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.391585] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.query_placement_for_image_type_support = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.391766] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.391945] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] scheduler.workers = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.392139] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.392313] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.392490] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.392666] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.392830] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.393037] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.393224] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.393416] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.393587] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.host_subset_size = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.393753] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.393916] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.394093] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.394264] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.isolated_hosts = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.394452] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.isolated_images = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.394637] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.394809] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.394980] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.395160] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.pci_in_placement = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.395325] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.395487] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.395652] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.395833] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.396038] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.396213] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.396379] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.track_instance_changes = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 498.396560] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
{{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.397069] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.397244] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] metrics.weight_setting = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.397601] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.397787] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] serial_console.enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.397967] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] serial_console.port_range = 10000:20000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.398157] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.398331] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.398500] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] serial_console.serialproxy_port = 6083 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.398667] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.398872] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.auth_type = password {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.399054] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.399218] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.399383] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.399548] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.insecure = False {{(pid=61356) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.399705] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.399877] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.send_service_user_token = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.400050] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.400218] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] service_user.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.400421] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.agent_enabled = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.400584] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.400900] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.401107] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.401283] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.html5proxy_port = 6082 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.401456] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.image_compression = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.401649] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.jpeg_compression = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.401826] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.playback_compression = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.402012] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.server_listen = 127.0.0.1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.402188] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.402352] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.streaming_mode = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.402512] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] spice.zlib_compression = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.402676] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] upgrade_levels.baseapi = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.402845] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] upgrade_levels.compute = auto {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.403010] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] upgrade_levels.conductor = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.403252] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] upgrade_levels.scheduler = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.403437] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.403896] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.auth_type = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.403896] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.403962] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.404084] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.404253] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.404415] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.keyfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.404583] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.404783] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vendordata_dynamic_auth.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.404967] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.api_retry_count = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.405146] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.ca_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.405321] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.cache_prefix = devstack-image-cache {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.405489] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.cluster_name = testcl1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.405658] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.connection_pool_size = 10 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.405818] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.console_delay_seconds = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.405988] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.datastore_regex = ^datastore.* {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.406243] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.406428] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.host_password = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.406601] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.host_port = 443 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.406771] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.host_username = administrator@vsphere.local {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.406941] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.insecure = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.407120] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.integration_bridge = None {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.407288] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.maximum_objects = 100 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.407447] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.pbm_default_policy = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.407661] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.pbm_enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.407817] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.pbm_wsdl_location = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.407990] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.408166] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.serial_port_proxy_uri = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.408327] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.serial_port_service_uri = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.408495] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.task_poll_interval = 0.5 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.408672] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.use_linked_clone = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.408841] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.vnc_keymap = en-us {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.409043] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.vnc_port = 5900 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.409224] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vmware.vnc_port_total = 10000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.409409] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.auth_schemes = ['none'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.409585] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.409881] env[61356]: 
DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.410075] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.410253] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.novncproxy_port = 6080 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.410459] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.server_listen = 127.0.0.1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.410642] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.410807] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.vencrypt_ca_certs = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.410966] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.vencrypt_client_cert = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.411141] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vnc.vencrypt_client_key = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.411322] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.411513] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.disable_deep_image_inspection = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.411683] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.411885] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.412075] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.412243] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.disable_rootwrap = False {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.412406] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.enable_numa_live_migration = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.412570] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.412735] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.412897] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.413068] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.libvirt_disable_apic = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.413246] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.413489] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.413671] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.413840] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.414011] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.414184] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.414350] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.414513] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.414674] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.414837] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.415077] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.415259] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.client_socket_timeout = 900 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.415427] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.default_pool_size = 1000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.415597] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.keep_alive = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.415764] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.max_header_line = 16384 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.415926] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.secure_proxy_ssl_header = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.416102] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.ssl_ca_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.416267] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.ssl_cert_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.416435] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.ssl_key_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.416645] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.tcp_keepidle = 600 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.416843] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.417016] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] zvm.ca_file = None {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.417185] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] zvm.cloud_connector_url = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.417468] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.417677] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] zvm.reachable_timeout = 300 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.417874] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.enforce_new_defaults = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.418085] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.enforce_scope = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.418285] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.policy_default_rule = default {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.418469] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.418649] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.policy_file = policy.yaml {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.418822] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.418986] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.419164] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.419327] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.419490] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.419671] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.419849] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.420038] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.connection_string = messaging:// {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.420214] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.enabled = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.420410] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.es_doc_type = notification {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.420586] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.es_scroll_size = 10000 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.420757] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.es_scroll_time = 2m {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.420922] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.filter_error_trace = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.421103] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.hmac_keys = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.421277] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.sentinel_service_name = mymaster {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.421463] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.socket_timeout = 0.1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.421640] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.trace_requests = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.421806] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler.trace_sqlalchemy = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.421980] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler_jaeger.process_tags = {} {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.422157] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.422321] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] profiler_otlp.service_name_prefix = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.422487] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] remote_debug.host = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.422648] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] remote_debug.port = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.422829] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.422993] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.423175] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.423340] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.423506] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.423669] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.423829] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.423993] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.424171] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.424344] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.424504] env[61356]: 
DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.424674] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.424840] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.425014] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.425194] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.425364] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.425530] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.425701] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.425863] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.426036] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.426213] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.426381] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.426542] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.426710] env[61356]: DEBUG oslo_service.service [None 
req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.426868] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.427037] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.427205] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.427367] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.427535] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.427700] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.ssl = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.427881] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.428052] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.428221] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.428393] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.428565] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.ssl_version = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.428727] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.428916] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.429095] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_notifications.retry = -1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.429280] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.429457] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_messaging_notifications.transport_url = **** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.429648] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.auth_section = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.429830] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.auth_type = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.429992] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.cafile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.430163] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.certfile = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.430361] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.collect_timing = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.430517] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.connect_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.430680] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.connect_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.430839] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.endpoint_id = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.430995] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.endpoint_override = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.431170] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.insecure = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.431336] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.keyfile = None {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.431518] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.max_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.431671] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.min_version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.431830] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.region_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.431986] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.retriable_status_codes = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.432161] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.service_name = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.432319] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.service_type = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.432480] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.split_loggers = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.432641] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.status_code_retries = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.432798] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.status_code_retry_delay = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.432953] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.timeout = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.433125] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.valid_interfaces = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.433281] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_limit.version = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.433444] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_reports.file_event_handler = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.433610] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61356) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.433767] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] oslo_reports.log_dir = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.433935] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.434103] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.434262] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.434427] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.434592] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.434751] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.434917] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.435088] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_ovs_privileged.group = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.435249] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.435414] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.435578] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.435735] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] vif_plug_ovs_privileged.user = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.435902] env[61356]: DEBUG oslo_service.service 
[None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.flat_interface = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.436087] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.436265] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.436436] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.436608] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.436773] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.436936] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.437109] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.437288] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.437457] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.isolate_vif = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.437631] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.437798] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.437993] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.438147] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.ovsdb_interface = native {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 498.438310] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_vif_ovs.per_port_bridge = False {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.438475] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_brick.lock_path = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.438642] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.438804] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.438969] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] privsep_osbrick.capabilities = [21] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.439141] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] privsep_osbrick.group = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.439299] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] privsep_osbrick.helper_command = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.439463] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.439631] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.439787] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] privsep_osbrick.user = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.439958] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.440149] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] nova_sys_admin.group = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.440314] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] nova_sys_admin.helper_command = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.440513] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
498.440684] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.440844] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] nova_sys_admin.user = None {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 498.440975] env[61356]: DEBUG oslo_service.service [None req-77ea03f3-a31f-4dcc-aed5-a0a9918ab4aa None None] ******************************************************************************** {{(pid=61356) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 498.441421] env[61356]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 498.451738] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Getting list of instances from cluster (obj){ [ 498.451738] env[61356]: value = "domain-c8" [ 498.451738] env[61356]: _type = "ClusterComputeResource" [ 498.451738] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 498.452960] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732c520d-bae3-4b19-a6cb-b7cec7f763e0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.462315] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Got total of 0 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 498.462822] env[61356]: WARNING nova.virt.vmwareapi.driver [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 498.463270] env[61356]: INFO nova.virt.node [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Generated node identity cfb833eb-84f8-480d-866a-1ad2fb871484 [ 498.463486] env[61356]: INFO nova.virt.node [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Wrote node identity cfb833eb-84f8-480d-866a-1ad2fb871484 to /opt/stack/data/n-cpu-1/compute_id [ 498.476433] env[61356]: WARNING nova.compute.manager [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Compute nodes ['cfb833eb-84f8-480d-866a-1ad2fb871484'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 498.512687] env[61356]: INFO nova.compute.manager [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 498.535104] env[61356]: WARNING nova.compute.manager [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 498.535358] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.535590] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.535757] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.535916] env[61356]: DEBUG nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 498.537040] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51e48f0-9af4-460d-b312-ea87d492bd7d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.545159] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a84d4d3-22d3-430d-bcd1-c5fa1c75f7ee {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.559217] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c8531b-b1de-47a3-bdf7-55f201e8c1b6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.565681] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a229100-9d91-455b-b833-4392a9125da5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.596335] env[61356]: DEBUG nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180706MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 498.596489] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 498.596679] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 498.608884] env[61356]: WARNING 
nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] No compute node record for cpu-1:cfb833eb-84f8-480d-866a-1ad2fb871484: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cfb833eb-84f8-480d-866a-1ad2fb871484 could not be found. [ 498.621819] env[61356]: INFO nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: cfb833eb-84f8-480d-866a-1ad2fb871484 [ 498.674373] env[61356]: DEBUG nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 498.674572] env[61356]: DEBUG nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=110GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 498.782527] env[61356]: INFO nova.scheduler.client.report [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] [req-9988b7a2-a6bd-41b9-a714-e8dc3ebef57f] Created resource provider record via placement API for resource provider with UUID cfb833eb-84f8-480d-866a-1ad2fb871484 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 498.800583] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45208688-6982-4343-b792-ed655a2e9968 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.808376] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd23ad0-bfa2-4e53-83a0-5451145c2b68 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.837702] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0656c3c0-5a18-44eb-a9b0-db1e5340bc94 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.844971] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e210e07a-109d-44bc-a786-056c565a5270 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 498.858500] env[61356]: DEBUG nova.compute.provider_tree [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 498.899692] env[61356]: DEBUG nova.scheduler.client.report [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Updated inventory for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 498.899938] env[61356]: DEBUG nova.compute.provider_tree [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Updating resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 generation from 0 to 1 during operation: update_inventory {{(pid=61356) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 498.900096] env[61356]: DEBUG nova.compute.provider_tree [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 498.952460] env[61356]: DEBUG nova.compute.provider_tree [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Updating resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 generation from 1 to 2 during operation: update_traits {{(pid=61356) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 498.970529] env[61356]: DEBUG nova.compute.resource_tracker [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 498.970742] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.374s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.970908] env[61356]: DEBUG nova.service [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Creating RPC server for service compute {{(pid=61356) start /opt/stack/nova/nova/service.py:182}} [ 498.983751] env[61356]: DEBUG nova.service [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] Join ServiceGroup membership for this service compute {{(pid=61356) start /opt/stack/nova/nova/service.py:199}} [ 498.983943] env[61356]: DEBUG nova.servicegroup.drivers.db [None req-5b753ecd-154f-4c11-a966-5d464e63ec21 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61356) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 508.270391] env[61356]: DEBUG dbcounter [-] [61356] Writing DB stats nova_cell0:SELECT=1 {{(pid=61356) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 508.271173] env[61356]: DEBUG dbcounter [-] [61356] Writing DB stats nova_cell1:SELECT=1 {{(pid=61356) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 518.985873] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 518.998538] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Getting list of instances from cluster (obj){ [ 518.998538] env[61356]: value = "domain-c8" [ 518.998538] env[61356]: _type = "ClusterComputeResource" [ 518.998538] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 518.999758] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6242d636-f19d-4406-aa75-d499279d274c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.008443] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Got total of 0 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 519.008705] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 519.009034] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Getting list of instances from cluster (obj){ [ 519.009034] env[61356]: value = "domain-c8" [ 519.009034] env[61356]: _type = "ClusterComputeResource" [ 519.009034] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 519.009861] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8db5107-a596-4f14-8fbc-bd2836bcced2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.017013] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Got total of 0 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 542.919442] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.921625] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.946935] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 543.081200] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.081679] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.084658] env[61356]: INFO nova.compute.claims [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.299789] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5985f39-c2e9-401b-a4e1-37f76216c632 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.310014] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97f96de-8dca-47e3-8a95-673723eb0a0e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.348387] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28789457-9ba0-4faa-9eea-c978595b0277 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.362083] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6d9e33-a930-4a6c-8fcf-d33c3f4ad4f6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.383627] env[61356]: DEBUG nova.compute.provider_tree [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.399112] env[61356]: DEBUG nova.scheduler.client.report [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 543.425045] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 
tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.427391] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 543.498154] env[61356]: DEBUG nova.compute.utils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.500259] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 543.503540] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 543.523169] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 543.651895] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 543.759816] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "6a2bad10-6948-482a-803a-2da7cf733d49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.760090] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "6a2bad10-6948-482a-803a-2da7cf733d49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.771918] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 543.771918] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 543.771918] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.772134] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 543.772134] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.772134] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 543.772134] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 543.772134] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 543.775232] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 543.775232] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 543.775450] env[61356]: DEBUG nova.virt.hardware [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 543.776604] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a08096-d82e-42c2-b902-a0acc55b8d5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.788473] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4aee2d2-2472-483f-869b-980edd290937 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.794494] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 543.810637] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccc9afd-4e55-49e2-8489-2beddf5d8760 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.863511] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.863763] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.865752] env[61356]: INFO nova.compute.claims [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.001455] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96e9b35-19c6-47ed-8619-a7802f3db9fe {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.014749] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334912a9-3abc-4e51-81ff-081c81d467f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.056752] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70976049-10e4-428c-ba0a-c89bad455b03 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.066267] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3472fc0-c283-47d0-b0fc-631a9c411810 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.082310] env[61356]: DEBUG nova.compute.provider_tree [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.098088] env[61356]: DEBUG nova.scheduler.client.report [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 
'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.127327] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.127577] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 544.157743] env[61356]: DEBUG nova.policy [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88792820aa3441e986522ccf9b2f7a43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9e03c4102e846819717689589fd7107', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.191369] env[61356]: DEBUG nova.compute.utils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.193044] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 544.193539] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.223440] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 544.319222] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 544.360076] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.360521] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.360809] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.361153] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.361470] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.361864] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.362235] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.362610] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.362906] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f 
tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.363234] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.363540] env[61356]: DEBUG nova.virt.hardware [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.364874] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5131d028-b661-45f3-95e5-bb6bce244a37 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.375442] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0fecc3-c0d9-4df6-a6d5-7f4d9c6ca7d5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.576348] env[61356]: DEBUG nova.policy [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94d6deb96c6f44c9a1cab0e74f3f2f72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08984d81a5404c1f9b3f7955a2dc4721', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.644728] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquiring lock "fce0ff0d-d055-422e-ae51-4989abff4b00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.644958] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Lock "fce0ff0d-d055-422e-ae51-4989abff4b00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.663835] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 544.758916] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.759261] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.762013] env[61356]: INFO nova.compute.claims [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.926167] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277a88dc-245a-417d-a43b-369d83d71f04 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.938678] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c077fb-77ce-48fd-bb4a-d9941d265132 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.973417] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe4df5d-017f-4a05-accb-a291f19d304e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.983047] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096b8732-9e08-4a06-b663-e5f11cfe7d44 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.997131] env[61356]: DEBUG nova.compute.provider_tree [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.010739] env[61356]: DEBUG nova.scheduler.client.report [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.027158] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.268s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.027746] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 545.127797] env[61356]: DEBUG nova.compute.utils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.129398] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 545.129657] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 545.150707] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 545.266179] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 545.303331] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 545.303549] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 545.304267] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 545.304571] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 545.304688] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 545.304826] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 545.305042] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 545.305202] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 545.305366] env[61356]: DEBUG nova.virt.hardware [None 
req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 545.305522] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 545.305688] env[61356]: DEBUG nova.virt.hardware [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 545.306874] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62392f72-0e31-4859-b8a0-3c0a213839b4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.315074] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de386a8-5a5a-4e04-a452-f9f2368f8b0f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.352768] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.353613] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Lock "314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.373037] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 545.447551] env[61356]: DEBUG nova.policy [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '636db5d449734dec93701d91d7733010', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '628945a5ffec417db819942b4ae96146', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 545.460430] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.460675] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.462280] env[61356]: INFO nova.compute.claims [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.578127] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "bcf27023-1eb6-4846-9adc-2235c738cad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.582771] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "bcf27023-1eb6-4846-9adc-2235c738cad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.005s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.627505] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 545.729317] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2dcc956-2e69-407f-9dc8-e4078304e059 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.737502] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9961a87-f019-4e6f-96a4-94114d4a2663 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.776019] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.776807] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21547b6-8a94-4f80-8d22-4b469c295e73 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.784399] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0873d2e8-b0f5-476c-b97b-eaaa1719ea51 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.800817] env[61356]: DEBUG nova.compute.provider_tree [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.815853] env[61356]: DEBUG nova.scheduler.client.report [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 545.841809] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.381s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.842392] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 545.845854] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.069s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.847322] env[61356]: INFO nova.compute.claims [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.923138] env[61356]: DEBUG nova.compute.utils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.930228] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Not allocating networking since 'none' was specified. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 545.947780] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.062352] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.107202] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.109718] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.109718] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.109718] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.109718] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.109718] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.110022] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.110022] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.110022] env[61356]: DEBUG nova.virt.hardware [None 
req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.110022] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.110261] env[61356]: DEBUG nova.virt.hardware [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.111453] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763e31f7-b5ba-4ac1-a27c-063c20ecb876 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.116960] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec74e42-3f44-46e4-ba90-cfc21284e03e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.125449] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359ae8ad-af7f-4c5e-919d-f9370bbc907d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.143683] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Instance VIF info [] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 546.157054] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.159533] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-266bfff6-299c-4002-95e7-68ef1216d462 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.162151] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d448d2-e7d6-4899-af1e-57bf28adb1e2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.204162] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f098af-6dca-40a1-99c6-12ce2ec6dc39 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.208390] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Created folder: OpenStack in parent group-v4. 
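The Folder.CreateFolder invocations above, confirmed by "Created folder: OpenStack in parent group-v4", are nova's vm_util building its OpenStack > Project > Instances folder hierarchy in the vCenter inventory through oslo.vmware. Below is a minimal sketch of that call pattern; the host, credentials, and retry settings are placeholders rather than values from this run, and the DuplicateName handling nova performs when a folder already exists is omitted.

    # Sketch of the folder-creation pattern seen in the log entries above.
    # Endpoint and credentials are placeholders, not this run's vCenter.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # The service content exposes the root folder of the inventory tree.
    root_folder = session.vim.service_content.rootFolder

    # CreateFolder is a method of the Folder managed object; invoke_api
    # routes the SOAP request through the authenticated session, which is
    # what each opID'd Folder.CreateFolder request above corresponds to.
    openstack_folder = session.invoke_api(
        session.vim, 'CreateFolder', root_folder, name='OpenStack')

Nesting the hierarchy is then just repeated CreateFolder calls with the newly returned folder reference as the parent, which matches the group-v846926 / group-v846927 parent refs in the subsequent records.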
[ 546.211325] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Creating folder: Project (1168273e0d0b45fdbaf62d8158f86d8d). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.211325] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d72d4a67-1c53-43ef-9457-cd1f5580dbff {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.217343] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff462ffe-266c-448d-9724-d658d4f6ff20 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.227467] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Created folder: Project (1168273e0d0b45fdbaf62d8158f86d8d) in parent group-v846926. [ 546.227771] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Creating folder: Instances. Parent ref: group-v846927. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 546.228375] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96799458-83fc-4371-bde6-f1680c3c8716 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.239068] env[61356]: DEBUG nova.compute.provider_tree [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.250020] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Created folder: Instances in parent group-v846927. [ 546.250020] env[61356]: DEBUG oslo.service.loopingcall [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 546.250194] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 546.250422] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af2144fa-d162-4459-a815-d9a3a063ab2c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.270327] env[61356]: DEBUG nova.scheduler.client.report [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.282706] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 546.282706] env[61356]: value = "task-4302445" [ 546.282706] env[61356]: _type = "Task" [ 546.282706] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.287974] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.288573] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 546.300590] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302445, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.373498] env[61356]: DEBUG nova.compute.utils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 546.378522] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Allocating IP information in the background. 
{{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 546.378667] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 546.391172] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 546.498756] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 546.536156] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.538083] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.538083] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.538083] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.538083] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 546.538083] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.538283] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.538283] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.538394] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.538539] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.538706] env[61356]: DEBUG nova.virt.hardware [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.541207] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994b947a-064c-44a1-8039-f3d746082a51 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.554313] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf5e08f-298b-473b-9dd6-cd823d5be9be {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.792662] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302445, 'name': CreateVM_Task} progress is 99%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.190439] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Successfully created port: e1349182-a36a-4f86-8bc7-03e46648d6af {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.294196] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302445, 'name': CreateVM_Task, 'duration_secs': 0.888809} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.294377] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 547.295440] env[61356]: DEBUG oslo_vmware.service [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30fcd5d-7625-4cb4-ac25-05f182c8f5fd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.306980] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.307134] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.307793] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 547.308073] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f183868-81e4-41f6-9db6-d3deeb9f932d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.314623] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Waiting for the task: (returnval){ [ 547.314623] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5274662a-4036-dd74-113c-d32ec21fd5fb" [ 547.314623] env[61356]: _type = "Task" [ 547.314623] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.324658] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5274662a-4036-dd74-113c-d32ec21fd5fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.490045] env[61356]: DEBUG nova.policy [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca5090121c39486d851f7a4c9db0527e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad89b23b217b461d9af2f475c9de1b14', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 547.714185] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Successfully created port: ee1183ed-658a-4951-85d0-4bb37b62ad0a {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.827102] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.827611] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 547.827611] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.828231] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.830151] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 547.830472] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f508b07-b1b5-45f6-b7cd-e5eda17d4733 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.838597] env[61356]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 547.838796] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 547.839640] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfcbc87-1c53-4dbd-8e74-9e6812112122 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.848294] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4301e5-e647-447a-8eb3-ff07e67293ff {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.854239] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Waiting for the task: (returnval){ [ 547.854239] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52bee5d7-1776-adb2-3a0b-6ecb3d96e24a" [ 547.854239] env[61356]: _type = "Task" [ 547.854239] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.865795] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52bee5d7-1776-adb2-3a0b-6ecb3d96e24a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.370986] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 548.371996] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Creating directory with path [datastore2] vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 548.371996] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e51d8dae-6fce-4893-871f-1bdc0a47c8f0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.396596] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Created directory with path [datastore2] vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 548.396596] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Fetch image to [datastore2] vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 548.396596] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 548.398258] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23fa63c-6ea4-4a3b-ad98-cae2aaef74c1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.411105] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9150bcd4-aad6-47b4-a94e-0362aaa4ce38 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.422845] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed7820a-c336-4d63-a600-054d563442cc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.473626] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae50c065-9ece-4448-9b9c-d9edbe3539dc {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.480517] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6ec3cba4-fa17-4ea5-9bcc-3953f8c5017f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.506437] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 548.606796] env[61356]: DEBUG oslo_vmware.rw_handles [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 548.680202] env[61356]: DEBUG oslo_vmware.rw_handles [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 548.680425] env[61356]: DEBUG oslo_vmware.rw_handles [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 548.954258] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Successfully created port: 3bc87eaf-85bb-4dad-9145-e13bea0f20aa {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.831899] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Successfully created port: 90fa8a8d-fa15-4603-a017-f6a48e8506aa {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 551.180272] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Successfully created port: 8978d6b2-cccb-490f-b5af-e21b78dcf76e {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.274348] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Successfully created port: c22d264b-3153-49b8-a005-dfb6ef240830 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.220839] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Successfully updated port: 3bc87eaf-85bb-4dad-9145-e13bea0f20aa {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 553.237746] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquiring lock "refresh_cache-fce0ff0d-d055-422e-ae51-4989abff4b00" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.237877] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquired lock "refresh_cache-fce0ff0d-d055-422e-ae51-4989abff4b00" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.238049] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.391378] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.423637] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Successfully updated port: ee1183ed-658a-4951-85d0-4bb37b62ad0a {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 553.437248] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "refresh_cache-ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.437561] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired lock "refresh_cache-ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.438786] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.890242] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.277974] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Updating instance_info_cache with network_info: [{"id": "3bc87eaf-85bb-4dad-9145-e13bea0f20aa", "address": "fa:16:3e:fd:9a:d0", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bc87eaf-85", "ovs_interfaceid": "3bc87eaf-85bb-4dad-9145-e13bea0f20aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.310442] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Releasing lock "refresh_cache-fce0ff0d-d055-422e-ae51-4989abff4b00" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.310442] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Instance network_info: |[{"id": "3bc87eaf-85bb-4dad-9145-e13bea0f20aa", "address": "fa:16:3e:fd:9a:d0", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bc87eaf-85", "ovs_interfaceid": "3bc87eaf-85bb-4dad-9145-e13bea0f20aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.310739] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:9a:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bc87eaf-85bb-4dad-9145-e13bea0f20aa', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 554.331143] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Creating folder: Project (628945a5ffec417db819942b4ae96146). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.337816] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ade3891c-558f-4f44-b98c-e4327ca27a6b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.352236] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Created folder: Project (628945a5ffec417db819942b4ae96146) in parent group-v846926. [ 554.354534] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Creating folder: Instances. Parent ref: group-v846930. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 554.354534] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a68c59e1-66d4-41b9-9ea4-bd1e4673cff6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.370628] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Created folder: Instances in parent group-v846930. [ 554.371077] env[61356]: DEBUG oslo.service.loopingcall [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.371548] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 554.371927] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39af1b7c-ffa5-4a64-b0c6-4ac1e61392d5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.398831] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 554.398831] env[61356]: value = "task-4302448" [ 554.398831] env[61356]: _type = "Task" [ 554.398831] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.409506] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302448, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.499107] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Successfully updated port: 8978d6b2-cccb-490f-b5af-e21b78dcf76e {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 554.519133] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "refresh_cache-bcf27023-1eb6-4846-9adc-2235c738cad6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.519328] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquired lock "refresh_cache-bcf27023-1eb6-4846-9adc-2235c738cad6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.519566] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.690564] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.766100] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.767030] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.767030] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 554.767125] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 554.786975] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 554.787218] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 554.787424] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 554.787608] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 554.787751] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 554.787878] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 554.790583] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.790583] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.790583] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.790583] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.790583] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.790583] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.790782] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 554.790782] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 554.809117] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.809346] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.809523] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.809712] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 554.810902] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66365deb-f988-4e9e-9306-f2e0e880deec {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.824877] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67629304-727d-4a7f-b6d9-0463e8f5c406 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.850638] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49a1383-5c35-483a-83df-2398ab93020c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.861653] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4066180e-61b5-42ca-bf5e-f059be2b1567 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.903295] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180712MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 554.903556] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.903859] 
env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.916162] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302448, 'name': CreateVM_Task, 'duration_secs': 0.314331} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.916467] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 554.944457] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.944737] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.945170] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 554.946796] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deb16572-f3d8-4bbd-b5d4-1642ce864fb0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.953345] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "63f196ce-f9ad-4120-a96a-fd6824af893b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.953730] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.962103] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Waiting for the task: (returnval){ [ 554.962103] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527184ba-bcf5-7bee-94a2-ecd3b4f255c8" 
[ 554.962103] env[61356]: _type = "Task" [ 554.962103] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.974704] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527184ba-bcf5-7bee-94a2-ecd3b4f255c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.982561] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 554.998918] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.999113] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 6a2bad10-6948-482a-803a-2da7cf733d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.999251] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance fce0ff0d-d055-422e-ae51-4989abff4b00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.999548] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.999779] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bcf27023-1eb6-4846-9adc-2235c738cad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 555.037160] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 63f196ce-f9ad-4120-a96a-fd6824af893b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.037160] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 555.037160] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=110GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 555.062232] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Updating instance_info_cache with network_info: [{"id": "8978d6b2-cccb-490f-b5af-e21b78dcf76e", "address": "fa:16:3e:bd:2c:66", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8978d6b2-cc", "ovs_interfaceid": "8978d6b2-cccb-490f-b5af-e21b78dcf76e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.079446] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.083639] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Releasing lock "refresh_cache-bcf27023-1eb6-4846-9adc-2235c738cad6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.083639] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Instance network_info: |[{"id": "8978d6b2-cccb-490f-b5af-e21b78dcf76e", "address": "fa:16:3e:bd:2c:66", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", 
"subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8978d6b2-cc", "ovs_interfaceid": "8978d6b2-cccb-490f-b5af-e21b78dcf76e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 555.083860] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:2c:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8978d6b2-cccb-490f-b5af-e21b78dcf76e', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.092886] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Creating folder: Project (ad89b23b217b461d9af2f475c9de1b14). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.096719] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e577aae-bb23-4588-83b1-d39f3a014774 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.108844] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Created folder: Project (ad89b23b217b461d9af2f475c9de1b14) in parent group-v846926. [ 555.109050] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Creating folder: Instances. Parent ref: group-v846933. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.109597] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ebbb0b2-3eb1-4d2a-9f8b-deb9081b2806 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.119043] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Created folder: Instances in parent group-v846933. 
[ 555.119043] env[61356]: DEBUG oslo.service.loopingcall [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.119043] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 555.119043] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01869973-9ebf-4c40-b183-b4790259b206 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.141203] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.141203] env[61356]: value = "task-4302451" [ 555.141203] env[61356]: _type = "Task" [ 555.141203] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.156768] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302451, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.191392] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177b6f2c-41e6-4193-801d-2254973f760f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.207997] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4e688b-3d41-4d2a-a9ae-36e208a6def4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.239483] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895e903f-02a3-4358-a958-556363c8a160 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.249581] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9d91ac-95a7-4776-83db-b95e00ca5d33 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.263270] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.277042] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.292908] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 
None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 555.293183] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.389s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.293796] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.215s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.295313] env[61356]: INFO nova.compute.claims [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.347261] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Updating instance_info_cache with network_info: [{"id": "ee1183ed-658a-4951-85d0-4bb37b62ad0a", "address": "fa:16:3e:b9:55:9d", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee1183ed-65", "ovs_interfaceid": "ee1183ed-658a-4951-85d0-4bb37b62ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.364594] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Releasing lock "refresh_cache-ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.364594] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance network_info: |[{"id": "ee1183ed-658a-4951-85d0-4bb37b62ad0a", "address": "fa:16:3e:b9:55:9d", "network": {"id": 
"f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee1183ed-65", "ovs_interfaceid": "ee1183ed-658a-4951-85d0-4bb37b62ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 555.364770] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:55:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee1183ed-658a-4951-85d0-4bb37b62ad0a', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.372474] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating folder: Project (a9e03c4102e846819717689589fd7107). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.373377] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe8ffc2e-bed5-47cc-bc9c-adadb0b1fb2c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.389264] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Created folder: Project (a9e03c4102e846819717689589fd7107) in parent group-v846926. [ 555.389264] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating folder: Instances. Parent ref: group-v846936. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 555.389264] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31d7c7f0-1f41-48a4-b3a1-12da9a437bf4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.398621] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Created folder: Instances in parent group-v846936. 
[ 555.398894] env[61356]: DEBUG oslo.service.loopingcall [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.402046] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 555.402156] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08122f5a-67bc-410a-bf75-c4eb9b168772 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.434270] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.434270] env[61356]: value = "task-4302454" [ 555.434270] env[61356]: _type = "Task" [ 555.434270] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.445500] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302454, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.479600] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.479809] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 555.480045] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.524604] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f36b54-8fd1-41e5-a965-305225c24beb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.533305] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fcb5fe-eb8e-478c-8e80-81ba775444ea {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.584606] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b022eed4-0eaa-4349-ba04-2e32edef46b7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.598522] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dc98373e-40d2-45bb-85ef-de11066699ee {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.619427] env[61356]: DEBUG nova.compute.provider_tree [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.642806] env[61356]: DEBUG nova.scheduler.client.report [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.663516] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302451, 'name': CreateVM_Task, 'duration_secs': 0.333407} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.663516] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.663865] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.663865] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.665956] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 555.665956] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-257edf2f-78a2-4006-b256-d6db3adababf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.671176] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Waiting for the task: (returnval){ [ 555.671176] env[61356]: value = 
"session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52b3cb37-15be-49c9-92f7-4a24875d052e" [ 555.671176] env[61356]: _type = "Task" [ 555.671176] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.683640] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52b3cb37-15be-49c9-92f7-4a24875d052e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.717589] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.424s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.718171] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 555.783977] env[61356]: DEBUG nova.compute.utils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.785741] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 555.785922] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 555.807734] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 555.908927] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 555.950345] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302454, 'name': CreateVM_Task, 'duration_secs': 0.335871} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.952647] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.955018] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.955018] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.955018] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.955018] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.955018] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.955481] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 555.955481] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.955481] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 
tempest-ImagesTestJSON-1579121251-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.955481] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.955481] env[61356]: DEBUG nova.virt.hardware [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.955615] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 555.955788] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c3fec2-8357-4640-855e-3d7881c4da0a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.960490] env[61356]: DEBUG nova.policy [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39a6d62c94e84721a98fca5c0b2412c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e90c65dabdd44f6f9c6e524fbf01f380', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 555.963505] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.971390] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ccece5-e522-412b-b875-00f1e25df068 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.183726] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.184194] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 
556.184526] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.184950] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.185623] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.185623] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-061b98c2-fd32-4d53-81b6-25338db06abd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.192577] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 556.192577] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]524b9f4a-88fc-8829-a04e-71369d631865" [ 556.192577] env[61356]: _type = "Task" [ 556.192577] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.202330] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]524b9f4a-88fc-8829-a04e-71369d631865, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.271200] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Successfully updated port: e1349182-a36a-4f86-8bc7-03e46648d6af {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 556.705756] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.706093] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 556.706093] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.290898] env[61356]: DEBUG nova.compute.manager [req-d5009884-d8a5-4b48-b2a8-e21040fef8e9 req-26a2c67d-d23c-40f5-b735-491c72bbacc3 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Received event network-vif-plugged-ee1183ed-658a-4951-85d0-4bb37b62ad0a {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 557.291182] env[61356]: DEBUG oslo_concurrency.lockutils [req-d5009884-d8a5-4b48-b2a8-e21040fef8e9 req-26a2c67d-d23c-40f5-b735-491c72bbacc3 service nova] Acquiring lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.291384] env[61356]: DEBUG oslo_concurrency.lockutils [req-d5009884-d8a5-4b48-b2a8-e21040fef8e9 req-26a2c67d-d23c-40f5-b735-491c72bbacc3 service nova] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.291556] env[61356]: DEBUG oslo_concurrency.lockutils [req-d5009884-d8a5-4b48-b2a8-e21040fef8e9 req-26a2c67d-d23c-40f5-b735-491c72bbacc3 service nova] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.291723] env[61356]: DEBUG nova.compute.manager [req-d5009884-d8a5-4b48-b2a8-e21040fef8e9 req-26a2c67d-d23c-40f5-b735-491c72bbacc3 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] No waiting events found dispatching 
network-vif-plugged-ee1183ed-658a-4951-85d0-4bb37b62ad0a {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 557.291942] env[61356]: WARNING nova.compute.manager [req-d5009884-d8a5-4b48-b2a8-e21040fef8e9 req-26a2c67d-d23c-40f5-b735-491c72bbacc3 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Received unexpected event network-vif-plugged-ee1183ed-658a-4951-85d0-4bb37b62ad0a for instance with vm_state building and task_state spawning. [ 557.466847] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Successfully created port: cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.499294] env[61356]: DEBUG nova.compute.manager [req-b7d01e4f-4594-4620-9d38-e5a8b1194898 req-496186fe-d141-439e-8ff3-b48d747fef26 service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Received event network-vif-plugged-3bc87eaf-85bb-4dad-9145-e13bea0f20aa {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 557.499294] env[61356]: DEBUG oslo_concurrency.lockutils [req-b7d01e4f-4594-4620-9d38-e5a8b1194898 req-496186fe-d141-439e-8ff3-b48d747fef26 service nova] Acquiring lock "fce0ff0d-d055-422e-ae51-4989abff4b00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.499294] env[61356]: DEBUG oslo_concurrency.lockutils [req-b7d01e4f-4594-4620-9d38-e5a8b1194898 req-496186fe-d141-439e-8ff3-b48d747fef26 service nova] Lock "fce0ff0d-d055-422e-ae51-4989abff4b00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.499294] env[61356]: DEBUG oslo_concurrency.lockutils [req-b7d01e4f-4594-4620-9d38-e5a8b1194898 req-496186fe-d141-439e-8ff3-b48d747fef26 service nova] Lock "fce0ff0d-d055-422e-ae51-4989abff4b00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.499495] env[61356]: DEBUG nova.compute.manager [req-b7d01e4f-4594-4620-9d38-e5a8b1194898 req-496186fe-d141-439e-8ff3-b48d747fef26 service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] No waiting events found dispatching network-vif-plugged-3bc87eaf-85bb-4dad-9145-e13bea0f20aa {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 557.505141] env[61356]: WARNING nova.compute.manager [req-b7d01e4f-4594-4620-9d38-e5a8b1194898 req-496186fe-d141-439e-8ff3-b48d747fef26 service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Received unexpected event network-vif-plugged-3bc87eaf-85bb-4dad-9145-e13bea0f20aa for instance with vm_state building and task_state spawning. 
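Editor's note: the lock records in this trace follow the two oslo.concurrency call styles, one logging "Acquiring lock" / "Acquired lock" / "Releasing lock" (lockutils.py:310/313/331) and one logging "acquired by ... :: waited Ns" / '"released" by ... :: held Ns' (lockutils.py:402/407/421). Below is a minimal sketch of both styles using the public oslo_concurrency API; the lock name "my-resource" and the do_work() body are illustrative placeholders, not taken from this log, and the mapping of call form to log line shape is my reading of the trace, not confirmed from the library source.

import time

from oslo_concurrency import lockutils

def do_work():
    time.sleep(0.1)  # stand-in for the guarded critical section

# Context-manager form: appears to produce the plain "Acquiring lock" /
# "Acquired lock" / "Releasing lock" DEBUG lines seen around the
# image-cache and refresh_cache locks in this trace.
with lockutils.lock("my-resource"):
    do_work()

# Decorator form: appears to produce the "acquired by ... :: waited Ns"
# and '"released" by ... :: held Ns' lines seen around
# pop_instance_event and ResourceTracker.instance_claim.
@lockutils.synchronized("my-resource")
def guarded():
    do_work()

guarded()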
[ 560.172158] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "9ca64659-93b6-4585-a7dc-f9152184144e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.172483] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "9ca64659-93b6-4585-a7dc-f9152184144e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.195105] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 560.208826] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Successfully updated port: 90fa8a8d-fa15-4603-a017-f6a48e8506aa {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 560.305571] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.305776] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.310549] env[61356]: INFO nova.compute.claims [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.503521] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688006f6-937a-422b-b295-63915a6d4e32 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.512430] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341f9abe-28e4-4e28-8c1a-cd131a28d65a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.548607] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f110eb1e-db80-4682-b6b3-b2718db3435a {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.556724] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4dc747-e112-49ad-b2bd-2a4bab7222e8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.574731] env[61356]: DEBUG nova.compute.provider_tree [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 560.594075] env[61356]: DEBUG nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 560.621559] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.623196] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 560.696827] env[61356]: DEBUG nova.compute.utils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 560.699086] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 560.699310] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 560.717341] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Start building block device mappings for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 560.836063] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 560.879022] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 560.879022] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 560.879022] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.879236] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 560.879236] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 560.879236] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 560.879236] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 560.879236] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 
tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 560.879406] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 560.879406] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 560.879406] env[61356]: DEBUG nova.virt.hardware [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 560.882557] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740b05f9-767e-4de4-9283-6fb187a42801 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.894880] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3083330e-a79f-4950-9a91-58986e0b3f2e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.113436] env[61356]: DEBUG nova.policy [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a529ee66f8594f81b484b3d38bca535a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f438f7e3ef643568b8d0fe283737fea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 561.452805] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Successfully updated port: cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 561.464093] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "refresh_cache-63f196ce-f9ad-4120-a96a-fd6824af893b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.468246] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired lock "refresh_cache-63f196ce-f9ad-4120-a96a-fd6824af893b" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.468246] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.655299] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.505423] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Successfully created port: e3ec7337-973b-4ea2-8548-d897d2addc2d {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.684259] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Updating instance_info_cache with network_info: [{"id": "cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3", "address": "fa:16:3e:13:8d:04", "network": {"id": "faeb66a5-72f9-4276-8490-83a2eef57844", "bridge": "br-int", "label": "tempest-ImagesTestJSON-444094988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90c65dabdd44f6f9c6e524fbf01f380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd7be5e1-b4", "ovs_interfaceid": "cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.707987] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Releasing lock "refresh_cache-63f196ce-f9ad-4120-a96a-fd6824af893b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.708355] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance network_info: |[{"id": "cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3", "address": "fa:16:3e:13:8d:04", "network": {"id": "faeb66a5-72f9-4276-8490-83a2eef57844", "bridge": "br-int", "label": 
"tempest-ImagesTestJSON-444094988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90c65dabdd44f6f9c6e524fbf01f380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd7be5e1-b4", "ovs_interfaceid": "cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 562.709163] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:8d:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 562.722108] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating folder: Project (e90c65dabdd44f6f9c6e524fbf01f380). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 562.722816] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8609dea2-7aae-47fd-94f1-5f943a2eb98e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.734462] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Created folder: Project (e90c65dabdd44f6f9c6e524fbf01f380) in parent group-v846926. [ 562.734904] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating folder: Instances. Parent ref: group-v846939. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 562.734904] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02c2a78c-7c70-45be-b38d-85bd2f2341a7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.746318] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Created folder: Instances in parent group-v846939. 
[ 562.746670] env[61356]: DEBUG oslo.service.loopingcall [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.747193] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 562.747193] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0332e592-b8f0-43de-ae9a-41e87103afc9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.767039] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 562.767039] env[61356]: value = "task-4302457" [ 562.767039] env[61356]: _type = "Task" [ 562.767039] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.775542] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302457, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.284812] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.287737] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.287737] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302457, 'name': CreateVM_Task, 'duration_secs': 0.305716} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.288418] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 563.290465] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.290465] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.290465] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 563.290465] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63249305-4549-4401-85a1-4f4d30304e6e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.300031] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 563.300031] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f4f2ab-0a56-9689-941b-b3acc38ae262" [ 563.300031] env[61356]: _type = "Task" [ 563.300031] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.306668] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 563.321161] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.321161] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 563.321161] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.395973] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.396340] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.398601] env[61356]: INFO nova.compute.claims [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.606196] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Successfully updated port: c22d264b-3153-49b8-a005-dfb6ef240830 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 563.629709] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.632490] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.632490] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 563.645573] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "eed82b3f-6590-4677-a4a2-680d769400fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.646773] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "eed82b3f-6590-4677-a4a2-680d769400fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.005s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.665370] env[61356]: DEBUG nova.compute.manager [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Received event network-changed-ee1183ed-658a-4951-85d0-4bb37b62ad0a {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 563.665370] env[61356]: DEBUG nova.compute.manager [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Refreshing instance network info cache due to event network-changed-ee1183ed-658a-4951-85d0-4bb37b62ad0a. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 563.665746] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Acquiring lock "refresh_cache-ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.666559] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Acquired lock "refresh_cache-ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.666877] env[61356]: DEBUG nova.network.neutron [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Refreshing network info cache for port ee1183ed-658a-4951-85d0-4bb37b62ad0a {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 563.699702] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcdb2df-ce70-45ce-abc1-be6a57d8a857 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.707975] env[61356]: DEBUG nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Received event network-changed-3bc87eaf-85bb-4dad-9145-e13bea0f20aa {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 563.708352] env[61356]: DEBUG nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Refreshing instance network info cache due to event network-changed-3bc87eaf-85bb-4dad-9145-e13bea0f20aa. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 563.708352] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Acquiring lock "refresh_cache-fce0ff0d-d055-422e-ae51-4989abff4b00" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 563.708475] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Acquired lock "refresh_cache-fce0ff0d-d055-422e-ae51-4989abff4b00" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 563.708740] env[61356]: DEBUG nova.network.neutron [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Refreshing network info cache for port 3bc87eaf-85bb-4dad-9145-e13bea0f20aa {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 563.720888] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937a4224-7b5c-4f66-a2c2-0ed5bc51ab9e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.767043] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 563.770670] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803204e0-6bf2-45c5-980f-de61039bec75 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.780095] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460f6528-e5ca-43e3-b0a7-4c5233c5b952 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.803083] env[61356]: DEBUG nova.compute.provider_tree [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.821730] env[61356]: DEBUG nova.scheduler.client.report [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.872926] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 
tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.476s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.873412] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 563.884159] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.887448] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.887676] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.889236] env[61356]: INFO nova.compute.claims [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.930401] env[61356]: DEBUG nova.compute.utils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.931965] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Allocating IP information in the background. 
{{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 563.934841] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 563.948340] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 564.036038] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 564.066878] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.067649] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.068424] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.068424] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.068424] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.068653] env[61356]: DEBUG 
nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.068700] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.068857] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 564.069030] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.069198] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.069495] env[61356]: DEBUG nova.virt.hardware [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.070351] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea7bbb0-a86f-4afd-9d01-30cbc558ebf4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.084523] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dc7eb4-b366-4bae-aa84-37a5d12490f1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.166659] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc3c122-2c98-47e1-8931-636a5365ec82 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.174319] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef2fcf-2d91-4de6-ad05-b6952d714067 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.212162] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdca4534-c772-485f-b894-ebd18b72a2e0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.219970] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5d495e-6646-46b8-b270-b24aba38a76c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.233506] env[61356]: DEBUG nova.compute.provider_tree [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.245576] env[61356]: DEBUG nova.scheduler.client.report [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 564.265290] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.265290] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 564.292633] env[61356]: DEBUG nova.policy [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '175ae8a275fc451e914d6d549611668a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '318f2956b29e4fdc832a0c70c9741bf3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 564.304337] env[61356]: DEBUG nova.compute.utils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 564.305643] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Allocating IP information in the background. 
{{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 564.306351] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 564.322437] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 564.433040] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 564.470985] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 564.471331] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 564.471404] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 564.471584] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 564.471773] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 564.471993] env[61356]: DEBUG nova.virt.hardware [None 
req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 564.474058] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 564.474058] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 564.474058] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 564.474058] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 564.474058] env[61356]: DEBUG nova.virt.hardware [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 564.474614] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedd0877-eb1d-4d15-9903-5cdbd379ce8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.482956] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da141d03-5482-4629-93da-b1b8e06f7809 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.835892] env[61356]: DEBUG nova.policy [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e088b29e5774eb99306a29b4b4c1cc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2efbf64effd942a58737dd67b9b4a694', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 565.118066] env[61356]: DEBUG nova.network.neutron [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Updated VIF entry 
in instance network info cache for port 3bc87eaf-85bb-4dad-9145-e13bea0f20aa. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 565.118828] env[61356]: DEBUG nova.network.neutron [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Updating instance_info_cache with network_info: [{"id": "3bc87eaf-85bb-4dad-9145-e13bea0f20aa", "address": "fa:16:3e:fd:9a:d0", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bc87eaf-85", "ovs_interfaceid": "3bc87eaf-85bb-4dad-9145-e13bea0f20aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.140170] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Releasing lock "refresh_cache-fce0ff0d-d055-422e-ae51-4989abff4b00" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.140170] env[61356]: DEBUG nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Received event network-vif-plugged-8978d6b2-cccb-490f-b5af-e21b78dcf76e {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 565.140170] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Acquiring lock "bcf27023-1eb6-4846-9adc-2235c738cad6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.140170] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Lock "bcf27023-1eb6-4846-9adc-2235c738cad6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.140538] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Lock "bcf27023-1eb6-4846-9adc-2235c738cad6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
565.140538] env[61356]: DEBUG nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] No waiting events found dispatching network-vif-plugged-8978d6b2-cccb-490f-b5af-e21b78dcf76e {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 565.140538] env[61356]: WARNING nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Received unexpected event network-vif-plugged-8978d6b2-cccb-490f-b5af-e21b78dcf76e for instance with vm_state building and task_state spawning. [ 565.140538] env[61356]: DEBUG nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Received event network-changed-8978d6b2-cccb-490f-b5af-e21b78dcf76e {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 565.140662] env[61356]: DEBUG nova.compute.manager [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Refreshing instance network info cache due to event network-changed-8978d6b2-cccb-490f-b5af-e21b78dcf76e. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 565.141314] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Acquiring lock "refresh_cache-bcf27023-1eb6-4846-9adc-2235c738cad6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.141638] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Acquired lock "refresh_cache-bcf27023-1eb6-4846-9adc-2235c738cad6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.144092] env[61356]: DEBUG nova.network.neutron [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Refreshing network info cache for port 8978d6b2-cccb-490f-b5af-e21b78dcf76e {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 565.273585] env[61356]: DEBUG nova.network.neutron [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Updated VIF entry in instance network info cache for port ee1183ed-658a-4951-85d0-4bb37b62ad0a. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 565.274192] env[61356]: DEBUG nova.network.neutron [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Updating instance_info_cache with network_info: [{"id": "ee1183ed-658a-4951-85d0-4bb37b62ad0a", "address": "fa:16:3e:b9:55:9d", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee1183ed-65", "ovs_interfaceid": "ee1183ed-658a-4951-85d0-4bb37b62ad0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.293133] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Releasing lock "refresh_cache-ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.293133] env[61356]: DEBUG nova.compute.manager [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received event network-vif-plugged-e1349182-a36a-4f86-8bc7-03e46648d6af {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 565.293133] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Acquiring lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.294175] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.294175] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.294175] env[61356]: DEBUG nova.compute.manager 
[req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] No waiting events found dispatching network-vif-plugged-e1349182-a36a-4f86-8bc7-03e46648d6af {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 565.294337] env[61356]: WARNING nova.compute.manager [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received unexpected event network-vif-plugged-e1349182-a36a-4f86-8bc7-03e46648d6af for instance with vm_state building and task_state spawning. [ 565.294510] env[61356]: DEBUG nova.compute.manager [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received event network-changed-e1349182-a36a-4f86-8bc7-03e46648d6af {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 565.294674] env[61356]: DEBUG nova.compute.manager [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Refreshing instance network info cache due to event network-changed-e1349182-a36a-4f86-8bc7-03e46648d6af. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 565.294850] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Acquiring lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.496321] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Successfully updated port: e3ec7337-973b-4ea2-8548-d897d2addc2d {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 565.554061] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "refresh_cache-9ca64659-93b6-4585-a7dc-f9152184144e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.554061] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquired lock "refresh_cache-9ca64659-93b6-4585-a7dc-f9152184144e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.554061] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 565.706881] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 566.461386] env[61356]: DEBUG nova.network.neutron [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Updated VIF entry in instance network info cache for port 8978d6b2-cccb-490f-b5af-e21b78dcf76e. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 566.461866] env[61356]: DEBUG nova.network.neutron [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Updating instance_info_cache with network_info: [{"id": "8978d6b2-cccb-490f-b5af-e21b78dcf76e", "address": "fa:16:3e:bd:2c:66", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8978d6b2-cc", "ovs_interfaceid": "8978d6b2-cccb-490f-b5af-e21b78dcf76e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.480853] env[61356]: DEBUG oslo_concurrency.lockutils [req-6addb201-ca69-4442-960f-a12644e7a0c1 req-de0f4154-1b16-4985-b390-870e7c8d2e4b service nova] Releasing lock "refresh_cache-bcf27023-1eb6-4846-9adc-2235c738cad6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.495185] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Successfully created port: 1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 566.577443] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Updating instance_info_cache with network_info: [{"id": "e3ec7337-973b-4ea2-8548-d897d2addc2d", "address": "fa:16:3e:56:c7:72", "network": {"id": "5a5e0b63-e550-48c7-a3e8-86084f9e4192", "bridge": "br-int", "label": "tempest-ServersTestJSON-1608225518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"8f438f7e3ef643568b8d0fe283737fea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3ec7337-97", "ovs_interfaceid": "e3ec7337-973b-4ea2-8548-d897d2addc2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.601712] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Releasing lock "refresh_cache-9ca64659-93b6-4585-a7dc-f9152184144e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.601814] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance network_info: |[{"id": "e3ec7337-973b-4ea2-8548-d897d2addc2d", "address": "fa:16:3e:56:c7:72", "network": {"id": "5a5e0b63-e550-48c7-a3e8-86084f9e4192", "bridge": "br-int", "label": "tempest-ServersTestJSON-1608225518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f438f7e3ef643568b8d0fe283737fea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3ec7337-97", "ovs_interfaceid": "e3ec7337-973b-4ea2-8548-d897d2addc2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 566.602472] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:c7:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3ec7337-973b-4ea2-8548-d897d2addc2d', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 566.614507] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Creating folder: Project (8f438f7e3ef643568b8d0fe283737fea). Parent ref: group-v846926. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 566.615172] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de712d3e-3dcd-4f3c-8555-312f8c1a5b17 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.628372] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Created folder: Project (8f438f7e3ef643568b8d0fe283737fea) in parent group-v846926. [ 566.628514] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Creating folder: Instances. Parent ref: group-v846942. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 566.629690] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dea88453-9795-4ab0-a1c6-e7afbb78e9ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.641648] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Created folder: Instances in parent group-v846942. [ 566.641868] env[61356]: DEBUG oslo.service.loopingcall [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.644323] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 566.644323] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfc3e1d6-0ff3-4747-add8-c8a1576ff94a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.668862] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 566.668862] env[61356]: value = "task-4302460" [ 566.668862] env[61356]: _type = "Task" [ 566.668862] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.681146] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302460, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.174636] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updating instance_info_cache with network_info: [{"id": "e1349182-a36a-4f86-8bc7-03e46648d6af", "address": "fa:16:3e:87:da:d0", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1349182-a3", "ovs_interfaceid": "e1349182-a36a-4f86-8bc7-03e46648d6af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "address": "fa:16:3e:78:89:da", "network": {"id": "ae3dded6-4e3a-4df1-8c82-35d743680a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1087160703", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fa8a8d-fa", "ovs_interfaceid": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c22d264b-3153-49b8-a005-dfb6ef240830", "address": "fa:16:3e:ea:da:37", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", 
"external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc22d264b-31", "ovs_interfaceid": "c22d264b-3153-49b8-a005-dfb6ef240830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.181565] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302460, 'name': CreateVM_Task, 'duration_secs': 0.339204} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.182327] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 567.184416] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.184584] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.185334] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 567.185334] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e006779-069e-441f-ad0a-188b99378903 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.192063] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Releasing lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.192390] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance network_info: |[{"id": "e1349182-a36a-4f86-8bc7-03e46648d6af", "address": "fa:16:3e:87:da:d0", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1349182-a3", "ovs_interfaceid": "e1349182-a36a-4f86-8bc7-03e46648d6af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "address": "fa:16:3e:78:89:da", "network": {"id": "ae3dded6-4e3a-4df1-8c82-35d743680a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1087160703", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fa8a8d-fa", "ovs_interfaceid": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c22d264b-3153-49b8-a005-dfb6ef240830", "address": "fa:16:3e:ea:da:37", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc22d264b-31", "ovs_interfaceid": "c22d264b-3153-49b8-a005-dfb6ef240830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 567.194627] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Acquired lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.194627] env[61356]: DEBUG nova.network.neutron [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] 
[instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Refreshing network info cache for port e1349182-a36a-4f86-8bc7-03e46648d6af {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 567.196103] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:da:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1349182-a36a-4f86-8bc7-03e46648d6af', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:89:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90fa8a8d-fa15-4603-a017-f6a48e8506aa', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:da:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae18b41f-e73c-44f1-83dd-467c080944f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c22d264b-3153-49b8-a005-dfb6ef240830', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 567.211904] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating folder: Project (08984d81a5404c1f9b3f7955a2dc4721). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.214034] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Waiting for the task: (returnval){ [ 567.214034] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521cdbdf-e761-7609-4adf-c18e4844a3c9" [ 567.214034] env[61356]: _type = "Task" [ 567.214034] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.215988] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fc618bc-e4c5-4652-9c0a-a6c64f3e126f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.229126] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.229389] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 567.229607] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.231384] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Created folder: Project (08984d81a5404c1f9b3f7955a2dc4721) in parent group-v846926. [ 567.231595] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating folder: Instances. Parent ref: group-v846945. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 567.231793] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1224f694-7d22-4e79-997a-684d3989f62c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.241016] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Created folder: Instances in parent group-v846945. [ 567.241431] env[61356]: DEBUG oslo.service.loopingcall [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.241503] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 567.241752] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1abf5bd-e7e4-44a2-91b1-d0beac54eb15 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.278029] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 567.278029] env[61356]: value = "task-4302463" [ 567.278029] env[61356]: _type = "Task" [ 567.278029] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.292310] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302463, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.382546] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Successfully created port: 9b3e2eec-1d88-4f15-a776-105c4c287691 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.797222] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302463, 'name': CreateVM_Task, 'duration_secs': 0.396416} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.799428] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 567.803198] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.803198] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.803198] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 567.803198] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae0ae31-1c78-441d-af6d-efc22c3ab414 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.811064] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f 
tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){ [ 567.811064] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521798d7-65c9-b934-c968-6669b0109811" [ 567.811064] env[61356]: _type = "Task" [ 567.811064] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.824911] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521798d7-65c9-b934-c968-6669b0109811, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.322683] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.323240] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 568.323508] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.631148] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Successfully updated port: 1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 568.655706] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "refresh_cache-b35cffdf-8926-4a6b-8118-48535e8cb8c0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.655817] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquired lock "refresh_cache-b35cffdf-8926-4a6b-8118-48535e8cb8c0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.655977] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Building network info cache for instance {{(pid=61356) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.702573] env[61356]: DEBUG nova.network.neutron [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updated VIF entry in instance network info cache for port e1349182-a36a-4f86-8bc7-03e46648d6af. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 568.702573] env[61356]: DEBUG nova.network.neutron [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updating instance_info_cache with network_info: [{"id": "e1349182-a36a-4f86-8bc7-03e46648d6af", "address": "fa:16:3e:87:da:d0", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1349182-a3", "ovs_interfaceid": "e1349182-a36a-4f86-8bc7-03e46648d6af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "address": "fa:16:3e:78:89:da", "network": {"id": "ae3dded6-4e3a-4df1-8c82-35d743680a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1087160703", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fa8a8d-fa", "ovs_interfaceid": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c22d264b-3153-49b8-a005-dfb6ef240830", "address": "fa:16:3e:ea:da:37", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc22d264b-31", "ovs_interfaceid": "c22d264b-3153-49b8-a005-dfb6ef240830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.715780] env[61356]: DEBUG oslo_concurrency.lockutils [req-e7726d85-5fbf-4742-9313-bd93f837a5f6 req-092b186c-eef5-4346-9c70-3f23758e0f30 service nova] Releasing lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.747644] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.350630] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Updating instance_info_cache with network_info: [{"id": "1cecbdeb-ed2a-4a58-98af-35f9ff6ad808", "address": "fa:16:3e:64:5a:d7", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cecbdeb-ed", "ovs_interfaceid": "1cecbdeb-ed2a-4a58-98af-35f9ff6ad808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.370224] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Releasing lock "refresh_cache-b35cffdf-8926-4a6b-8118-48535e8cb8c0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.370224] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 
tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance network_info: |[{"id": "1cecbdeb-ed2a-4a58-98af-35f9ff6ad808", "address": "fa:16:3e:64:5a:d7", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cecbdeb-ed", "ovs_interfaceid": "1cecbdeb-ed2a-4a58-98af-35f9ff6ad808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 569.370224] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:5a:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cecbdeb-ed2a-4a58-98af-35f9ff6ad808', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 569.380942] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Creating folder: Project (318f2956b29e4fdc832a0c70c9741bf3). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 569.381409] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36cf2636-3b64-452f-bd05-9a49c86eba03 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.396352] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Created folder: Project (318f2956b29e4fdc832a0c70c9741bf3) in parent group-v846926. [ 569.396571] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Creating folder: Instances. Parent ref: group-v846948. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 569.396824] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64b4d5f2-c886-4756-b283-f4aabda1d266 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.405841] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Created folder: Instances in parent group-v846948.
[ 569.406208] env[61356]: DEBUG oslo.service.loopingcall [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 569.406286] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 569.406481] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07673b0e-f0b7-41fe-8d15-108857255757 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.432288] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 569.432288] env[61356]: value = "task-4302466"
[ 569.432288] env[61356]: _type = "Task"
[ 569.432288] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 569.441222] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302466, 'name': CreateVM_Task} progress is 0%.
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.838021] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Successfully updated port: 9b3e2eec-1d88-4f15-a776-105c4c287691 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 569.856852] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "refresh_cache-eed82b3f-6590-4677-a4a2-680d769400fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.859338] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquired lock "refresh_cache-eed82b3f-6590-4677-a4a2-680d769400fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.859338] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 569.935688] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received event network-vif-plugged-90fa8a8d-fa15-4603-a017-f6a48e8506aa {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 569.936039] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquiring lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.936273] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.936457] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.936645] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] No waiting events found dispatching network-vif-plugged-90fa8a8d-fa15-4603-a017-f6a48e8506aa {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
569.936815] env[61356]: WARNING nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received unexpected event network-vif-plugged-90fa8a8d-fa15-4603-a017-f6a48e8506aa for instance with vm_state building and task_state spawning. [ 569.936966] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received event network-changed-90fa8a8d-fa15-4603-a017-f6a48e8506aa {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 569.939049] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Refreshing instance network info cache due to event network-changed-90fa8a8d-fa15-4603-a017-f6a48e8506aa. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 569.939049] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquiring lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.939049] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquired lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.939049] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Refreshing network info cache for port 90fa8a8d-fa15-4603-a017-f6a48e8506aa {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 569.945621] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.960126] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302466, 'name': CreateVM_Task, 'duration_secs': 0.345639} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 569.960484] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 569.963987] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 569.963987] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 569.963987] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 569.963987] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b7a5e93-7711-43f0-8927-9a2735510a4e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.969685] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Waiting for the task: (returnval){
[ 569.969685] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5221aecc-8e4e-e767-fc51-87b80c1812c3"
[ 569.969685] env[61356]: _type = "Task"
[ 569.969685] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 569.978690] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5221aecc-8e4e-e767-fc51-87b80c1812c3, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.982420] env[61356]: DEBUG nova.compute.manager [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Received event network-vif-plugged-e3ec7337-973b-4ea2-8548-d897d2addc2d {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 569.982420] env[61356]: DEBUG oslo_concurrency.lockutils [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] Acquiring lock "9ca64659-93b6-4585-a7dc-f9152184144e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.982420] env[61356]: DEBUG oslo_concurrency.lockutils [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] Lock "9ca64659-93b6-4585-a7dc-f9152184144e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.982420] env[61356]: DEBUG oslo_concurrency.lockutils [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] Lock "9ca64659-93b6-4585-a7dc-f9152184144e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.982420] env[61356]: DEBUG nova.compute.manager [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] No waiting events found dispatching network-vif-plugged-e3ec7337-973b-4ea2-8548-d897d2addc2d {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 569.982770] env[61356]: WARNING nova.compute.manager [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Received unexpected event network-vif-plugged-e3ec7337-973b-4ea2-8548-d897d2addc2d for instance with vm_state building and task_state spawning. [ 569.983322] env[61356]: DEBUG nova.compute.manager [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Received event network-changed-e3ec7337-973b-4ea2-8548-d897d2addc2d {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 569.983549] env[61356]: DEBUG nova.compute.manager [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Refreshing instance network info cache due to event network-changed-e3ec7337-973b-4ea2-8548-d897d2addc2d. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 569.983722] env[61356]: DEBUG oslo_concurrency.lockutils [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] Acquiring lock "refresh_cache-9ca64659-93b6-4585-a7dc-f9152184144e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.983860] env[61356]: DEBUG oslo_concurrency.lockutils [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] Acquired lock "refresh_cache-9ca64659-93b6-4585-a7dc-f9152184144e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.984026] env[61356]: DEBUG nova.network.neutron [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Refreshing network info cache for port e3ec7337-973b-4ea2-8548-d897d2addc2d {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 570.486134] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.486970] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 570.487356] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.522680] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Updating instance_info_cache with network_info: [{"id": "9b3e2eec-1d88-4f15-a776-105c4c287691", "address": "fa:16:3e:e4:4a:7a", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap9b3e2eec-1d", "ovs_interfaceid": "9b3e2eec-1d88-4f15-a776-105c4c287691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.540769] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Releasing lock "refresh_cache-eed82b3f-6590-4677-a4a2-680d769400fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.541861] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance network_info: |[{"id": "9b3e2eec-1d88-4f15-a776-105c4c287691", "address": "fa:16:3e:e4:4a:7a", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3e2eec-1d", "ovs_interfaceid": "9b3e2eec-1d88-4f15-a776-105c4c287691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 570.541861] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:4a:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b3e2eec-1d88-4f15-a776-105c4c287691', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 570.549720] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Creating folder: Project (2efbf64effd942a58737dd67b9b4a694). Parent ref: group-v846926. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 570.550623] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6da6bd9b-e105-4f49-b4a5-0e671f73f721 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 570.561026] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Created folder: Project (2efbf64effd942a58737dd67b9b4a694) in parent group-v846926.
[ 570.561433] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Creating folder: Instances. Parent ref: group-v846951. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 570.561531] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3085267d-a78a-452f-afeb-580df196bb29 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 570.573257] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Created folder: Instances in parent group-v846951.
[ 570.574124] env[61356]: DEBUG oslo.service.loopingcall [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 570.574124] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 570.574124] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d53086b2-569f-410d-b23a-d8f1ed2a1eae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 570.607568] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 570.607568] env[61356]: value = "task-4302469"
[ 570.607568] env[61356]: _type = "Task"
[ 570.607568] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 570.615132] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302469, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 570.794133] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updated VIF entry in instance network info cache for port 90fa8a8d-fa15-4603-a017-f6a48e8506aa.
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 570.797075] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updating instance_info_cache with network_info: [{"id": "e1349182-a36a-4f86-8bc7-03e46648d6af", "address": "fa:16:3e:87:da:d0", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1349182-a3", "ovs_interfaceid": "e1349182-a36a-4f86-8bc7-03e46648d6af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "address": "fa:16:3e:78:89:da", "network": {"id": "ae3dded6-4e3a-4df1-8c82-35d743680a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1087160703", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fa8a8d-fa", "ovs_interfaceid": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c22d264b-3153-49b8-a005-dfb6ef240830", "address": "fa:16:3e:ea:da:37", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", 
"segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc22d264b-31", "ovs_interfaceid": "c22d264b-3153-49b8-a005-dfb6ef240830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.814419] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Releasing lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.814690] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Received event network-vif-plugged-cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 570.814910] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquiring lock "63f196ce-f9ad-4120-a96a-fd6824af893b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.815822] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.816988] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.816988] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] No waiting events found dispatching network-vif-plugged-cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 570.816988] env[61356]: WARNING nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Received unexpected event network-vif-plugged-cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 for instance with vm_state building and task_state spawning. 
[ 570.816988] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Received event network-changed-cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 570.817704] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Refreshing instance network info cache due to event network-changed-cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 570.819120] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquiring lock "refresh_cache-63f196ce-f9ad-4120-a96a-fd6824af893b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.819120] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquired lock "refresh_cache-63f196ce-f9ad-4120-a96a-fd6824af893b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.819120] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Refreshing network info cache for port cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 570.853114] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.853317] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.870693] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 570.959816] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.960089] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.961821] env[61356]: INFO nova.compute.claims [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.125190] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302469, 'name': CreateVM_Task} progress is 99%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.202227] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Updated VIF entry in instance network info cache for port cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 571.202227] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Updating instance_info_cache with network_info: [{"id": "cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3", "address": "fa:16:3e:13:8d:04", "network": {"id": "faeb66a5-72f9-4276-8490-83a2eef57844", "bridge": "br-int", "label": "tempest-ImagesTestJSON-444094988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90c65dabdd44f6f9c6e524fbf01f380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd7be5e1-b4", "ovs_interfaceid": "cd7be5e1-b468-4f9f-afa1-2f75ac2b6ab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.210975] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c8abce-e605-41cf-843a-b5d9bab208dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.215812] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Releasing lock "refresh_cache-63f196ce-f9ad-4120-a96a-fd6824af893b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.216073] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received event network-vif-plugged-c22d264b-3153-49b8-a005-dfb6ef240830 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 571.216272] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquiring lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.216468] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.216792] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Lock 
"6a2bad10-6948-482a-803a-2da7cf733d49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.216894] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] No waiting events found dispatching network-vif-plugged-c22d264b-3153-49b8-a005-dfb6ef240830 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 571.217063] env[61356]: WARNING nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received unexpected event network-vif-plugged-c22d264b-3153-49b8-a005-dfb6ef240830 for instance with vm_state building and task_state spawning. [ 571.217269] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Received event network-changed-c22d264b-3153-49b8-a005-dfb6ef240830 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 571.217426] env[61356]: DEBUG nova.compute.manager [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Refreshing instance network info cache due to event network-changed-c22d264b-3153-49b8-a005-dfb6ef240830. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 571.217613] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquiring lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.217751] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Acquired lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.217997] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Refreshing network info cache for port c22d264b-3153-49b8-a005-dfb6ef240830 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 571.229571] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d990a3c-3123-4260-8669-a6d07d1b87e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.270337] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06cd6d6-0aea-4830-8225-35ef6adc6cae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.277734] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2ce1e6-1eb7-4486-be51-9de77a541b9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.292767] 
env[61356]: DEBUG nova.compute.provider_tree [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.295240] env[61356]: DEBUG nova.network.neutron [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Updated VIF entry in instance network info cache for port e3ec7337-973b-4ea2-8548-d897d2addc2d. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 571.295460] env[61356]: DEBUG nova.network.neutron [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Updating instance_info_cache with network_info: [{"id": "e3ec7337-973b-4ea2-8548-d897d2addc2d", "address": "fa:16:3e:56:c7:72", "network": {"id": "5a5e0b63-e550-48c7-a3e8-86084f9e4192", "bridge": "br-int", "label": "tempest-ServersTestJSON-1608225518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8f438f7e3ef643568b8d0fe283737fea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3ec7337-97", "ovs_interfaceid": "e3ec7337-973b-4ea2-8548-d897d2addc2d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.307464] env[61356]: DEBUG nova.scheduler.client.report [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.312761] env[61356]: DEBUG oslo_concurrency.lockutils [req-d3a65d6d-16c5-4fdb-a3a6-249a951ea31f req-93a601a4-1e79-4c69-b35e-af972baf0c21 service nova] Releasing lock "refresh_cache-9ca64659-93b6-4585-a7dc-f9152184144e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.322691] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 
tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.362s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.323285] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 571.383694] env[61356]: DEBUG nova.compute.utils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.385885] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 571.385967] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 571.395893] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 571.486273] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 571.519417] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.519711] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.519873] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.520234] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.520431] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.520587] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.520838] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.520999] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.521199] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.521480] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 571.521576] env[61356]: DEBUG nova.virt.hardware [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.522737] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d110c76b-8336-4e1e-94e5-c8e7f0745670 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.531768] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a4df10-4efe-4b59-a0f5-1d9d21c6c4bc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.570519] env[61356]: DEBUG nova.policy [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d225870cd7842ccbabd8f172e313b99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdd798c562c749b495a30e34561cdcb6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 571.617939] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302469, 'name': CreateVM_Task} progress is 99%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.121762] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302469, 'name': CreateVM_Task, 'duration_secs': 1.312566} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 572.122114] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 572.123203] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 572.123901] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 572.124402] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 572.125157] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4832e31-baf8-4f53-bc90-86bb2c6faf2e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.130853] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Waiting for the task: (returnval){
[ 572.130853] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e2ceaa-11cf-ac95-8b37-70c2d3331a70"
[ 572.130853] env[61356]: _type = "Task"
[ 572.130853] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 572.140999] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e2ceaa-11cf-ac95-8b37-70c2d3331a70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 572.179645] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updated VIF entry in instance network info cache for port c22d264b-3153-49b8-a005-dfb6ef240830.
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 572.179645] env[61356]: DEBUG nova.network.neutron [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updating instance_info_cache with network_info: [{"id": "e1349182-a36a-4f86-8bc7-03e46648d6af", "address": "fa:16:3e:87:da:d0", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.139", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", "segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1349182-a3", "ovs_interfaceid": "e1349182-a36a-4f86-8bc7-03e46648d6af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "address": "fa:16:3e:78:89:da", "network": {"id": "ae3dded6-4e3a-4df1-8c82-35d743680a3c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1087160703", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fa8a8d-fa", "ovs_interfaceid": "90fa8a8d-fa15-4603-a017-f6a48e8506aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c22d264b-3153-49b8-a005-dfb6ef240830", "address": "fa:16:3e:ea:da:37", "network": {"id": "69d89c22-5ab8-4b4d-bf4f-ff5a675035e8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-460109271", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.152", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae18b41f-e73c-44f1-83dd-467c080944f4", "external-id": "nsx-vlan-transportzone-653", 
"segmentation_id": 653, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc22d264b-31", "ovs_interfaceid": "c22d264b-3153-49b8-a005-dfb6ef240830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.194999] env[61356]: DEBUG oslo_concurrency.lockutils [req-21540c27-1e33-4da3-9260-19527e610a31 req-18d1e4dd-11c4-4fbe-898c-ae935e1d925e service nova] Releasing lock "refresh_cache-6a2bad10-6948-482a-803a-2da7cf733d49" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.647363] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.647363] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.647363] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.953052] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Successfully created port: c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.687668] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.687668] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.831926] env[61356]: DEBUG nova.compute.manager [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Received event 
network-vif-plugged-9b3e2eec-1d88-4f15-a776-105c4c287691 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 574.833221] env[61356]: DEBUG oslo_concurrency.lockutils [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] Acquiring lock "eed82b3f-6590-4677-a4a2-680d769400fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.833221] env[61356]: DEBUG oslo_concurrency.lockutils [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] Lock "eed82b3f-6590-4677-a4a2-680d769400fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.833489] env[61356]: DEBUG oslo_concurrency.lockutils [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] Lock "eed82b3f-6590-4677-a4a2-680d769400fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.833704] env[61356]: DEBUG nova.compute.manager [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] No waiting events found dispatching network-vif-plugged-9b3e2eec-1d88-4f15-a776-105c4c287691 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 574.833902] env[61356]: WARNING nova.compute.manager [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Received unexpected event network-vif-plugged-9b3e2eec-1d88-4f15-a776-105c4c287691 for instance with vm_state building and task_state spawning. [ 574.834104] env[61356]: DEBUG nova.compute.manager [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Received event network-changed-9b3e2eec-1d88-4f15-a776-105c4c287691 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 574.834298] env[61356]: DEBUG nova.compute.manager [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Refreshing instance network info cache due to event network-changed-9b3e2eec-1d88-4f15-a776-105c4c287691. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 574.834511] env[61356]: DEBUG oslo_concurrency.lockutils [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] Acquiring lock "refresh_cache-eed82b3f-6590-4677-a4a2-680d769400fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.834645] env[61356]: DEBUG oslo_concurrency.lockutils [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] Acquired lock "refresh_cache-eed82b3f-6590-4677-a4a2-680d769400fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.835012] env[61356]: DEBUG nova.network.neutron [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Refreshing network info cache for port 9b3e2eec-1d88-4f15-a776-105c4c287691 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 574.856447] env[61356]: DEBUG nova.compute.manager [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Received event network-vif-plugged-1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 574.856447] env[61356]: DEBUG oslo_concurrency.lockutils [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] Acquiring lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.856709] env[61356]: DEBUG oslo_concurrency.lockutils [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.857149] env[61356]: DEBUG oslo_concurrency.lockutils [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.857391] env[61356]: DEBUG nova.compute.manager [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] No waiting events found dispatching network-vif-plugged-1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 574.857576] env[61356]: WARNING nova.compute.manager [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Received unexpected event network-vif-plugged-1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 for instance with vm_state building and task_state spawning. 
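(Editor's note on the two WARNING lines above: this is the usual benign race during spawn. Neutron delivers network-vif-plugged before the compute manager has registered a waiter for that event, so pop_instance_event finds nothing to dispatch and the event is dropped with the "Received unexpected event ... vm_state building and task_state spawning" warning. A minimal sketch of the register-then-pop pattern behind nova.compute.manager.InstanceEvents.pop_instance_event follows; the method names prepare_for_event/handle_external_event and the use of threading.Event here are illustrative simplifications, not Nova's exact implementation, which uses eventlet and per-instance event lists.)

import threading

class InstanceEvents:
    """Simplified stand-in for the expected-event registry seen in the log."""

    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._events = {}               # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        # Spawn calls something like this *before* plugging the VIF, so the
        # later external event has a waiter to wake up.
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # The "_pop_event" critical section from the log: remove and return
        # the registered waiter, or None if nothing was registered yet.
        with self._lock:
            return self._events.pop((instance_uuid, event_name), None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Matches the WARNING path above: the event arrived before spawn
        # registered a waiter, so it is logged and dropped.
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        waiter.set()                    # wake the thread blocked in spawn

# Usage mirroring the log: the event arrives first, so the warning fires.
events = InstanceEvents()
handle_external_event(events, 'eed82b3f', 'network-vif-plugged-9b3e2eec')

(End of note; because the instance is still in task_state spawning, the dropped event is harmless: spawn either has not yet started waiting, or will time out and proceed.)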
[ 574.858637] env[61356]: DEBUG nova.compute.manager [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Received event network-changed-1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 574.858637] env[61356]: DEBUG nova.compute.manager [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Refreshing instance network info cache due to event network-changed-1cecbdeb-ed2a-4a58-98af-35f9ff6ad808. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 574.858637] env[61356]: DEBUG oslo_concurrency.lockutils [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] Acquiring lock "refresh_cache-b35cffdf-8926-4a6b-8118-48535e8cb8c0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.858637] env[61356]: DEBUG oslo_concurrency.lockutils [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] Acquired lock "refresh_cache-b35cffdf-8926-4a6b-8118-48535e8cb8c0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.858637] env[61356]: DEBUG nova.network.neutron [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Refreshing network info cache for port 1cecbdeb-ed2a-4a58-98af-35f9ff6ad808 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 574.888480] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Successfully updated port: c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 574.910682] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "refresh_cache-8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.910829] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquired lock "refresh_cache-8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.911023] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 575.043060] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] 
[instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.034509] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Updating instance_info_cache with network_info: [{"id": "c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6", "address": "fa:16:3e:3e:3f:44", "network": {"id": "3beca231-2932-4830-a8c8-7ea775576738", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1247659845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdd798c562c749b495a30e34561cdcb6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5bf41e3-b9", "ovs_interfaceid": "c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.064019] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Releasing lock "refresh_cache-8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.064019] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance network_info: |[{"id": "c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6", "address": "fa:16:3e:3e:3f:44", "network": {"id": "3beca231-2932-4830-a8c8-7ea775576738", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1247659845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdd798c562c749b495a30e34561cdcb6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5bf41e3-b9", "ovs_interfaceid": "c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 576.064019] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:3f:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ded8bac-871f-491b-94ec-cb67c08bc828', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 576.073203] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Creating folder: Project (bdd798c562c749b495a30e34561cdcb6). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 576.074928] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b982076-e6bc-4d19-9484-715dfc5cb3f0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.085718] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Created folder: Project (bdd798c562c749b495a30e34561cdcb6) in parent group-v846926. [ 576.085986] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Creating folder: Instances. Parent ref: group-v846954. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 576.086152] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cb5b792-9e5c-4ec7-bda0-2f810bc101d1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.095345] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Created folder: Instances in parent group-v846954. [ 576.095398] env[61356]: DEBUG oslo.service.loopingcall [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.095583] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 576.095797] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-577c901e-9c2e-4165-b54e-90a5a19c3efb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.119088] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 576.119088] env[61356]: value = "task-4302472" [ 576.119088] env[61356]: _type = "Task" [ 576.119088] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.128113] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302472, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.188633] env[61356]: DEBUG nova.network.neutron [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Updated VIF entry in instance network info cache for port 1cecbdeb-ed2a-4a58-98af-35f9ff6ad808. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 576.189322] env[61356]: DEBUG nova.network.neutron [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Updating instance_info_cache with network_info: [{"id": "1cecbdeb-ed2a-4a58-98af-35f9ff6ad808", "address": "fa:16:3e:64:5a:d7", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cecbdeb-ed", "ovs_interfaceid": "1cecbdeb-ed2a-4a58-98af-35f9ff6ad808", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.205471] env[61356]: DEBUG oslo_concurrency.lockutils [req-84723726-3d01-4207-8099-65bf9399c93a req-060f4e23-068c-4b31-a954-d868090e41a5 service nova] Releasing lock "refresh_cache-b35cffdf-8926-4a6b-8118-48535e8cb8c0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.307576] env[61356]: DEBUG nova.network.neutron [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Updated VIF entry in instance network info 
cache for port 9b3e2eec-1d88-4f15-a776-105c4c287691. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 576.307576] env[61356]: DEBUG nova.network.neutron [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Updating instance_info_cache with network_info: [{"id": "9b3e2eec-1d88-4f15-a776-105c4c287691", "address": "fa:16:3e:e4:4a:7a", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3e2eec-1d", "ovs_interfaceid": "9b3e2eec-1d88-4f15-a776-105c4c287691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.318635] env[61356]: DEBUG oslo_concurrency.lockutils [req-add67968-4c31-4252-bc14-c1105c0b576d req-e3d051fe-9ac5-4405-969d-9e3300e80d54 service nova] Releasing lock "refresh_cache-eed82b3f-6590-4677-a4a2-680d769400fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.633185] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302472, 'name': CreateVM_Task} progress is 25%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.130238] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302472, 'name': CreateVM_Task} progress is 99%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.632135] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302472, 'name': CreateVM_Task, 'duration_secs': 1.049293} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.632308] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 577.633066] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.633235] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.633563] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 577.633859] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6e6884f-a400-4693-9ea1-deea3c33d252 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.639491] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Waiting for the task: (returnval){ [ 577.639491] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]529d75bb-9cdb-b71f-ca3c-e9bf1f62b350" [ 577.639491] env[61356]: _type = "Task" [ 577.639491] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.649038] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]529d75bb-9cdb-b71f-ca3c-e9bf1f62b350, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.150582] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.150582] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.150582] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.346821] env[61356]: DEBUG nova.compute.manager [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Received event network-vif-plugged-c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 579.347124] env[61356]: DEBUG oslo_concurrency.lockutils [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] Acquiring lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.348347] env[61356]: DEBUG oslo_concurrency.lockutils [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.348347] env[61356]: DEBUG oslo_concurrency.lockutils [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.348347] env[61356]: DEBUG nova.compute.manager [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] No waiting events found dispatching network-vif-plugged-c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 579.348347] env[61356]: WARNING nova.compute.manager [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Received 
unexpected event network-vif-plugged-c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 for instance with vm_state building and task_state spawning. [ 579.348885] env[61356]: DEBUG nova.compute.manager [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Received event network-changed-c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 579.348885] env[61356]: DEBUG nova.compute.manager [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Refreshing instance network info cache due to event network-changed-c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 579.348885] env[61356]: DEBUG oslo_concurrency.lockutils [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] Acquiring lock "refresh_cache-8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.349083] env[61356]: DEBUG oslo_concurrency.lockutils [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] Acquired lock "refresh_cache-8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.349330] env[61356]: DEBUG nova.network.neutron [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Refreshing network info cache for port c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 580.489217] env[61356]: DEBUG nova.network.neutron [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Updated VIF entry in instance network info cache for port c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 580.489217] env[61356]: DEBUG nova.network.neutron [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Updating instance_info_cache with network_info: [{"id": "c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6", "address": "fa:16:3e:3e:3f:44", "network": {"id": "3beca231-2932-4830-a8c8-7ea775576738", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1247659845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bdd798c562c749b495a30e34561cdcb6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5bf41e3-b9", "ovs_interfaceid": "c5bf41e3-b93b-4a6f-b0fe-59ef919bd3a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.506418] env[61356]: DEBUG oslo_concurrency.lockutils [req-f37a3892-7aa2-47ea-83f1-f7020bcf1ffb req-2608e4f4-1faf-4a98-9eed-b478d7226cf8 service nova] Releasing lock "refresh_cache-8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.591051] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.592359] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.394567] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "d1887728-4538-4639-a3e7-db235a2d4765" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.396803] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 
tempest-ServerGroupTestJSON-1183880177-project-member] Lock "d1887728-4538-4639-a3e7-db235a2d4765" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.118217] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.119639] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.308217] env[61356]: DEBUG oslo_concurrency.lockutils [None req-50345d99-1db5-4187-837a-4c8c32d6c78f tempest-ImagesNegativeTestJSON-986467572 tempest-ImagesNegativeTestJSON-986467572-project-member] Acquiring lock "0e648c1e-565d-44cd-a1f5-297b000e42cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.308472] env[61356]: DEBUG oslo_concurrency.lockutils [None req-50345d99-1db5-4187-837a-4c8c32d6c78f tempest-ImagesNegativeTestJSON-986467572 tempest-ImagesNegativeTestJSON-986467572-project-member] Lock "0e648c1e-565d-44cd-a1f5-297b000e42cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.361143] env[61356]: DEBUG oslo_concurrency.lockutils [None req-21018319-9820-4fa8-b8fd-699775ab684b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.361497] env[61356]: DEBUG oslo_concurrency.lockutils [None req-21018319-9820-4fa8-b8fd-699775ab684b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.364334] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f30c8a0e-d4a4-4880-bf3a-4fb2df4d6fbc tempest-FloatingIPsAssociationTestJSON-91698464 tempest-FloatingIPsAssociationTestJSON-91698464-project-member] Acquiring lock "3bf420fa-ef3d-4d99-a0a8-379f59cafa1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.364334] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f30c8a0e-d4a4-4880-bf3a-4fb2df4d6fbc tempest-FloatingIPsAssociationTestJSON-91698464 tempest-FloatingIPsAssociationTestJSON-91698464-project-member] Lock "3bf420fa-ef3d-4d99-a0a8-379f59cafa1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.433637] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3092fe20-3fb8-4a38-aee4-d4cc618235fe tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] Acquiring lock "9f78f9fb-8733-42fe-beb0-9329bf154de8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.433978] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3092fe20-3fb8-4a38-aee4-d4cc618235fe tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] Lock "9f78f9fb-8733-42fe-beb0-9329bf154de8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.526199] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e76801d-9395-4e86-904c-7d2a3d5630cc tempest-ServersWithSpecificFlavorTestJSON-450643974 tempest-ServersWithSpecificFlavorTestJSON-450643974-project-member] Acquiring lock "669be123-adf8-4146-a79f-370347e642c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.526446] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e76801d-9395-4e86-904c-7d2a3d5630cc tempest-ServersWithSpecificFlavorTestJSON-450643974 tempest-ServersWithSpecificFlavorTestJSON-450643974-project-member] Lock "669be123-adf8-4146-a79f-370347e642c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.228630] env[61356]: WARNING oslo_vmware.rw_handles [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles version, status, 
reason = self._read_status() [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 598.228630] env[61356]: ERROR oslo_vmware.rw_handles [ 598.229421] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 598.231469] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 598.231771] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Copying Virtual Disk [datastore2] vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/1a17d543-e4fd-432e-8e4b-dec0fded355f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 598.232145] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db40b9c6-c8e3-40c3-aa84-93c4ac2dc6f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.239954] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Waiting for the task: (returnval){ [ 598.239954] env[61356]: value = "task-4302480" [ 598.239954] env[61356]: _type = "Task" [ 598.239954] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.249577] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Task: {'id': task-4302480, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.751170] env[61356]: DEBUG oslo_vmware.exceptions [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 598.751170] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.753984] env[61356]: ERROR nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 598.753984] env[61356]: Faults: ['InvalidArgument'] [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Traceback (most recent call last): [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] yield resources [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self.driver.spawn(context, instance, image_meta, [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self._fetch_image_if_missing(context, vi) [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] image_cache(vi, tmp_image_ds_loc) [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] vm_util.copy_virtual_disk( [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] session._wait_for_task(vmdk_copy_task) [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] return self.wait_for_task(task_ref) [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] return evt.wait() [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] result = hub.switch() [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] return self.greenlet.switch() [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self.f(*self.args, **self.kw) [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] raise exceptions.translate_fault(task_info.error) [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Faults: ['InvalidArgument'] [ 598.753984] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] [ 598.755361] env[61356]: INFO nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Terminating instance [ 598.757702] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.757963] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.758294] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad128e43-7b4e-4074-bd81-d139b15013d5 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.760903] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.761078] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquired lock "refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.761261] env[61356]: DEBUG nova.network.neutron [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.768672] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 598.768902] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 598.769797] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec52310b-262e-4249-807d-678b6e909d07 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.780271] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Waiting for the task: (returnval){ [ 598.780271] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52acf5be-d818-5580-fc07-2ff37425625c" [ 598.780271] env[61356]: _type = "Task" [ 598.780271] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.790446] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52acf5be-d818-5580-fc07-2ff37425625c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.800282] env[61356]: DEBUG nova.network.neutron [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Instance cache missing network info. 
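The "Waiting for the task: (returnval)…" and "progress is 0%" entries above are oslo.vmware's task polling, the same loop the spawn traceback earlier dies in. A minimal illustrative sketch of that polling pattern follows; the exception class and the `get_task_info` callable are simplified stand-ins, not the actual oslo.vmware API.

    import time

    class VimFaultException(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list      # e.g. ['InvalidArgument']

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info: callable returning an object shaped like vim.TaskInfo,
        # with .state in {'queued', 'running', 'success', 'error'}.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # The real _poll_task translates the VIM fault into a typed
                # exception; that is where 'A specified parameter was not
                # correct: fileType' surfaces in the tracebacks in this log.
                raise VimFaultException(info.error.faults, info.error.msg)
            time.sleep(poll_interval)         # queued/running: keep polling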
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.030914] env[61356]: DEBUG nova.network.neutron [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.042763] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Releasing lock "refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.043226] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 599.043419] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 599.044529] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5640cbdb-a52e-4628-b64b-1a6063d69acc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.052257] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 599.052500] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ce6ed71-8c51-46c8-9643-3578e62ef3a9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.099371] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 599.099655] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 599.099849] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Deleting the datastore file [datastore2] 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee {{(pid=61356) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 599.100143] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19afe371-1452-42df-a296-6c4a06ef2250 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.106835] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Waiting for the task: (returnval){ [ 599.106835] env[61356]: value = "task-4302483" [ 599.106835] env[61356]: _type = "Task" [ 599.106835] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.117828] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Task: {'id': task-4302483, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.291379] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 599.291658] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Creating directory with path [datastore2] vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 599.291879] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0cd8fcfa-fe6b-41e2-a019-1830ebff6ada {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.305135] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Created directory with path [datastore2] vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 599.305135] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Fetch image to [datastore2] vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 599.305135] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 599.306287] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd67ec4-95bb-4676-84ab-10332ebda5ce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.314138] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db04e39-6166-4ea6-83e2-032225d0d021 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.323856] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9315211e-ff8e-4946-b219-cdf4e7ad035a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.356391] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470b6425-422f-47ea-bdf2-e20e59ff6286 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.362701] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f63469aa-3df4-460e-8b95-9db24f9354e7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.384842] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 599.454388] env[61356]: DEBUG oslo_vmware.rw_handles [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 599.519725] env[61356]: DEBUG oslo_vmware.rw_handles [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 599.519916] env[61356]: DEBUG oslo_vmware.rw_handles [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
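The rw_handles entries above stream the image bytes over HTTPS to the ESX host's /folder endpoint, with dcPath and dsName selecting the datacenter and datastore. A rough sketch of that upload using plain requests, assuming a session cookie is already in hand (the real code uses oslo_vmware.rw_handles with the generic service ticket acquired just above):

    import requests

    def upload_to_datastore(esx_host, ds_name, ds_path, chunks, size, cookie):
        # Datastore files are exposed at https://<host>/folder/<path>.
        url = (f"https://{esx_host}:443/folder/{ds_path}"
               f"?dcPath=ha-datacenter&dsName={ds_name}")
        resp = requests.put(
            url,
            data=chunks,                      # any iterable of byte chunks
            headers={"Content-Length": str(size), "Cookie": cookie},
            verify=False,   # matches the lab setup here; verify certs otherwise
            timeout=300,
        )
        resp.raise_for_status()
        return resp.status_code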
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 599.617660] env[61356]: DEBUG oslo_vmware.api [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Task: {'id': task-4302483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039045} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.617894] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 599.618297] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 599.618297] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 599.618624] env[61356]: INFO nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Took 0.58 seconds to destroy the instance on the hypervisor. [ 599.618870] env[61356]: DEBUG oslo.service.loopingcall [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.619182] env[61356]: DEBUG nova.compute.manager [-] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Skipping network deallocation for instance since networking was not requested. 
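The "Waiting for function … _deallocate_network_with_retries to return" entry is oslo.service's looping-call helper, which re-runs a callback until it signals completion. A minimal usage sketch of that mechanism (real oslo.service API, trivial callback):

    from oslo_service import loopingcall

    def _poll():
        # A looping call re-invokes this at each interval until the callback
        # raises LoopingCallDone; retvalue becomes the .wait() result.
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone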
{{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 599.621347] env[61356]: DEBUG nova.compute.claims [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 599.621511] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.621734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.096328] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67ddfad-fe16-4ea9-8983-6cc4acc55479 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.103910] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e601083-253f-4bbe-974e-9ab2e56bbeb3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.135693] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad34fd76-110a-45ea-904a-64bc75ee21ac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.143860] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324d4464-291e-40b2-8d8e-beea91bff821 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.158864] env[61356]: DEBUG nova.compute.provider_tree [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.167873] env[61356]: DEBUG nova.scheduler.client.report [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.183681] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.562s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.184504] env[61356]: ERROR nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 600.184504] env[61356]: Faults: ['InvalidArgument'] [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Traceback (most recent call last): [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self.driver.spawn(context, instance, image_meta, [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self._fetch_image_if_missing(context, vi) [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] image_cache(vi, tmp_image_ds_loc) [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] vm_util.copy_virtual_disk( [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] session._wait_for_task(vmdk_copy_task) [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] return self.wait_for_task(task_ref) [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] return evt.wait() [ 
600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] result = hub.switch() [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] return self.greenlet.switch() [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] self.f(*self.args, **self.kw) [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] raise exceptions.translate_fault(task_info.error) [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Faults: ['InvalidArgument'] [ 600.184504] env[61356]: ERROR nova.compute.manager [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] [ 600.185397] env[61356]: DEBUG nova.compute.utils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.188986] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Build of instance 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee was re-scheduled: A specified parameter was not correct: fileType [ 600.188986] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 600.190957] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 600.190957] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Acquiring lock "refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.190957] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 
tempest-ServersAaction247Test-907619853-project-member] Acquired lock "refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.190957] env[61356]: DEBUG nova.network.neutron [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.225818] env[61356]: DEBUG nova.network.neutron [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.319808] env[61356]: DEBUG nova.network.neutron [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.329448] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Releasing lock "refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.329593] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 600.329790] env[61356]: DEBUG nova.compute.manager [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] [instance: 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee] Skipping network deallocation for instance since networking was not requested. {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 600.437983] env[61356]: INFO nova.scheduler.client.report [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Deleted allocations for instance 314f39ce-68c4-4d78-bdb0-0aaeb15dfcee [ 600.481446] env[61356]: DEBUG oslo_concurrency.lockutils [None req-90f7265f-5271-492d-a8fa-e10fb6254f4d tempest-ServersAaction247Test-907619853 tempest-ServersAaction247Test-907619853-project-member] Lock "314f39ce-68c4-4d78-bdb0-0aaeb15dfcee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 55.128s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.498861] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Starting instance... 
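The 55.128s hold time above is the per-instance build lock: every _locked_do_build_and_run_instance call for a given instance UUID is serialized through oslo.concurrency, as are the image-cache and refresh_cache locks throughout this log. A minimal sketch of both lockutils forms (real API; lock names and the function body are illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('314f39ce-68c4-4d78-bdb0-0aaeb15dfcee')
    def _locked_do_build_and_run_instance():
        ...  # only one thread per lock name runs this at a time

    # Equivalent context-manager form, as used for the cache locks:
    with lockutils.lock('refresh_cache-314f39ce-68c4-4d78-bdb0-0aaeb15dfcee'):
        pass  # refresh the network info cache while holding the lock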
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 600.564481] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.564744] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.566294] env[61356]: INFO nova.compute.claims [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.928271] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e594be3-db3a-49a7-98c6-57fa24f9c552 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.936599] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2ae2b4-c68e-40e4-b2ec-23cba354c341 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.966854] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c899c728-b57f-4d2f-b84e-3d500052b33e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.974474] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9f2380-a061-4a34-9393-71726707a14b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.989029] env[61356]: DEBUG nova.compute.provider_tree [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.002785] env[61356]: DEBUG nova.scheduler.client.report [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 601.022953] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef 
tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.458s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.023427] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 601.064759] env[61356]: DEBUG nova.compute.utils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.067396] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Not allocating networking since 'none' was specified. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 601.112211] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 601.197989] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Start spawning the instance on the hypervisor. 
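The inventory dictionaries logged above turn into schedulable capacity through placement's standard formula, capacity = (total - reserved) * allocation_ratio per resource class. Worked out for the reported values:

    def usable_capacity(inventory):
        # placement capacity per resource class: (total - reserved) * ratio
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inventory.items()}

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    print(usable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 210.0}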
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 601.231475] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.231717] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.231905] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.232116] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.232268] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.232416] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.232622] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.232784] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.232950] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef 
tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.233487] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.233698] env[61356]: DEBUG nova.virt.hardware [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.234867] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361853d2-f767-4515-aab8-72b00e2cdc0f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.243776] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda215a0-169a-4168-8823-2e3b4076a566 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.258830] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance VIF info [] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.264805] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Creating folder: Project (e40ecff1f1d143269b1bd988b2f50507). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.265142] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c44c9563-6fbe-49d6-a89e-c0967f168a18 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.278128] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Created folder: Project (e40ecff1f1d143269b1bd988b2f50507) in parent group-v846926. [ 601.278572] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Creating folder: Instances. Parent ref: group-v846961. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.278572] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-277e3beb-9a42-4c58-ac02-3700fdf451c4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.288208] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Created folder: Instances in parent group-v846961. [ 601.288295] env[61356]: DEBUG oslo.service.loopingcall [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.288707] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 601.288707] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9060f691-55cf-4c5e-b26e-be3117f35673 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.307087] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.307087] env[61356]: value = "task-4302487" [ 601.307087] env[61356]: _type = "Task" [ 601.307087] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.316327] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302487, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.817285] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302487, 'name': CreateVM_Task, 'duration_secs': 0.321217} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.817553] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 601.817812] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.817975] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.818293] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 601.818675] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd2b8a6f-a434-416d-9cc3-9f68e94b44fc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.825260] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for the task: (returnval){ [ 601.825260] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5226675a-0e71-96c6-69d6-75910884f54f" [ 601.825260] env[61356]: _type = "Task" [ 601.825260] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.834636] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5226675a-0e71-96c6-69d6-75910884f54f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.337529] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.338045] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.338282] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.999882] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "1df51a6d-1303-4c89-b61e-95070acf0772" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.001841] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "1df51a6d-1303-4c89-b61e-95070acf0772" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.627579] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c738df3a-cf02-439a-a3e6-307810aa6729 tempest-ServersTestBootFromVolume-1579151348 tempest-ServersTestBootFromVolume-1579151348-project-member] Acquiring lock "ea6b8561-5072-4576-991c-034f106af2df" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.628519] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c738df3a-cf02-439a-a3e6-307810aa6729 tempest-ServersTestBootFromVolume-1579151348 tempest-ServersTestBootFromVolume-1579151348-project-member] Lock "ea6b8561-5072-4576-991c-034f106af2df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.640098] env[61356]: DEBUG oslo_concurrency.lockutils [None req-13867a57-af74-4012-9a9f-6d39f4705523 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Acquiring lock "dd4a25f1-b8f0-43a1-8187-56eaddfdd08c" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.640533] env[61356]: DEBUG oslo_concurrency.lockutils [None req-13867a57-af74-4012-9a9f-6d39f4705523 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Lock "dd4a25f1-b8f0-43a1-8187-56eaddfdd08c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.803236] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4d38b50f-6fe0-4b39-b05f-a4e6bafe4852 tempest-AttachInterfacesUnderV243Test-345580002 tempest-AttachInterfacesUnderV243Test-345580002-project-member] Acquiring lock "bdfedbb4-25a1-4941-afca-f325e489f9e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.803236] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4d38b50f-6fe0-4b39-b05f-a4e6bafe4852 tempest-AttachInterfacesUnderV243Test-345580002 tempest-AttachInterfacesUnderV243Test-345580002-project-member] Lock "bdfedbb4-25a1-4941-afca-f325e489f9e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.285249] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.321048] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.321269] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.321433] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.321584] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 615.344184] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.344184] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.344184] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.344184] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 615.344184] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbb5839-7a52-4b3a-9440-7ae845836276 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.357651] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13b1f40-c31c-47fa-8bf7-c531556cec7f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.373215] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bf499c-bec9-4ba3-966f-f77bd873693e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.383998] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287d000d-66e3-4d10-9734-3a172c19d2b9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.419444] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180612MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 615.419444] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.419444] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.525052] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
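The "Running periodic task ComputeManager.*" entries come from oslo.service's periodic-task machinery, which drives the resource audit below. A compact sketch of how such tasks are declared (real decorator and base class; the manager class and spacing are illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)
        def update_available_resource(self, context):
            # invoked by run_periodic_tasks roughly every 60 seconds
            pass

    mgr = Manager()
    mgr.run_periodic_tasks(context=None)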
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525273] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 6a2bad10-6948-482a-803a-2da7cf733d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525351] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance fce0ff0d-d055-422e-ae51-4989abff4b00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525465] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bcf27023-1eb6-4846-9adc-2235c738cad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525585] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 63f196ce-f9ad-4120-a96a-fd6824af893b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525702] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9ca64659-93b6-4585-a7dc-f9152184144e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525820] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.525936] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.528901] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.529105] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 615.561686] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.574530] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.593330] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.608420] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 0e648c1e-565d-44cd-a1f5-297b000e42cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.621504] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.634734] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bf420fa-ef3d-4d99-a0a8-379f59cafa1e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.661569] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f78f9fb-8733-42fe-beb0-9329bf154de8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.674729] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 669be123-adf8-4146-a79f-370347e642c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.689390] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.703192] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ea6b8561-5072-4576-991c-034f106af2df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.721564] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance dd4a25f1-b8f0-43a1-8187-56eaddfdd08c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.756804] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bdfedbb4-25a1-4941-afca-f325e489f9e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 615.757112] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 615.757284] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 616.158590] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304fd40d-00ff-4e07-96fa-176d7133b87c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.170331] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd6f10d-1d54-43fb-907c-d148c63aaac6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.204171] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b9135c-6b5e-4b0f-a7b1-563a74e15a03 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.213908] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8836cb4-d931-40a4-8247-d9009476d08e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.229642] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.240945] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.261916] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 616.261916] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.844s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.698530] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.699628] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 616.699967] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 616.739600] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.739780] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.739951] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740102] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740256] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740383] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740532] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740718] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740844] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.740997] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 616.741135] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 616.741783] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.742035] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.742209] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 616.759988] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.760226] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 616.999286] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4fc8e0b7-dbc8-4b8e-a524-4d6c4419656b tempest-AttachInterfacesV270Test-953314663 tempest-AttachInterfacesV270Test-953314663-project-member] Acquiring lock "b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.999524] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4fc8e0b7-dbc8-4b8e-a524-4d6c4419656b tempest-AttachInterfacesV270Test-953314663 tempest-AttachInterfacesV270Test-953314663-project-member] Lock "b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.034464] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ea907ebe-69d7-4ec8-a17a-d43651086f7d tempest-ServerAddressesNegativeTestJSON-420863502 tempest-ServerAddressesNegativeTestJSON-420863502-project-member] Acquiring lock "59b0bc79-1c06-441a-a114-5727e3dec5ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.034839] env[61356]: DEBUG
oslo_concurrency.lockutils [None req-ea907ebe-69d7-4ec8-a17a-d43651086f7d tempest-ServerAddressesNegativeTestJSON-420863502 tempest-ServerAddressesNegativeTestJSON-420863502-project-member] Lock "59b0bc79-1c06-441a-a114-5727e3dec5ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.857920] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Acquiring lock "35a37ff4-c3a4-404c-a6fa-934d5d2b3846" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.859358] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "35a37ff4-c3a4-404c-a6fa-934d5d2b3846" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.901244] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Acquiring lock "d6df5c9d-0ead-4663-aa53-6b9a325440c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.901435] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "d6df5c9d-0ead-4663-aa53-6b9a325440c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.737364] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3a46dcd9-2481-42f9-a986-5b9647c4aabf tempest-ServerRescueTestJSON-404971253 tempest-ServerRescueTestJSON-404971253-project-member] Acquiring lock "9f3ce44e-34b4-44b5-b2c9-25e66b715594" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.737746] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3a46dcd9-2481-42f9-a986-5b9647c4aabf tempest-ServerRescueTestJSON-404971253 tempest-ServerRescueTestJSON-404971253-project-member] Lock "9f3ce44e-34b4-44b5-b2c9-25e66b715594" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.516846] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6d4d4c46-3961-48f8-93c8-471f552dabc9 tempest-ImagesOneServerTestJSON-1535343616 tempest-ImagesOneServerTestJSON-1535343616-project-member] Acquiring lock "ba1b5aa0-1764-4ca3-b771-36ad0d765a24" by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.516846] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6d4d4c46-3961-48f8-93c8-471f552dabc9 tempest-ImagesOneServerTestJSON-1535343616 tempest-ImagesOneServerTestJSON-1535343616-project-member] Lock "ba1b5aa0-1764-4ca3-b771-36ad0d765a24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.063695] env[61356]: DEBUG oslo_concurrency.lockutils [None req-585d6040-4e15-4c37-aaab-5a075414154e tempest-ServerDiagnosticsV248Test-1598086578 tempest-ServerDiagnosticsV248Test-1598086578-project-member] Acquiring lock "4366b0b5-62ba-4681-9306-d2d9ff903c3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.064020] env[61356]: DEBUG oslo_concurrency.lockutils [None req-585d6040-4e15-4c37-aaab-5a075414154e tempest-ServerDiagnosticsV248Test-1598086578 tempest-ServerDiagnosticsV248Test-1598086578-project-member] Lock "4366b0b5-62ba-4681-9306-d2d9ff903c3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.249029] env[61356]: WARNING oslo_vmware.rw_handles [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles [ 648.249029] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to 
vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 648.249029] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 648.249029] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Copying Virtual Disk [datastore2] vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/c28f5a06-01cc-4a3e-bb08-69acc9b33e69/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 648.250077] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2af82b7c-6f30-4ecb-85d7-118d72003457 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.259706] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Waiting for the task: (returnval){ [ 648.259706] env[61356]: value = "task-4302500" [ 648.259706] env[61356]: _type = "Task" [ 648.259706] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.268653] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Task: {'id': task-4302500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.770296] env[61356]: DEBUG oslo_vmware.exceptions [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 648.770578] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.771155] env[61356]: ERROR nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 648.771155] env[61356]: Faults: ['InvalidArgument'] [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Traceback (most recent call last): [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] yield resources [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self.driver.spawn(context, instance, image_meta, [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self._fetch_image_if_missing(context, vi) [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] image_cache(vi, tmp_image_ds_loc) [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] vm_util.copy_virtual_disk( [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] session._wait_for_task(vmdk_copy_task) [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] return self.wait_for_task(task_ref) [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] return evt.wait() [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] result = hub.switch() [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] return self.greenlet.switch() [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self.f(*self.args, **self.kw) [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] raise exceptions.translate_fault(task_info.error) [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Faults: ['InvalidArgument'] [ 648.771155] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] [ 648.772287] env[61356]: INFO nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Terminating instance [ 648.773095] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.773307] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.773968] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: 
fce0ff0d-d055-422e-ae51-4989abff4b00] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 648.774198] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 648.774436] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9393e2f1-ca6c-46fb-905a-d0953afb4334 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.776973] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ff0928-5dfe-4469-9dee-e1ec191ed9f5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.784481] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 648.784727] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef072fce-7041-4848-a48f-584a5c9617ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.786955] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.787142] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 648.788078] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f66a74-7fc7-4f33-9652-419cca4fb6c0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.793764] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Waiting for the task: (returnval){ [ 648.793764] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52dfec04-2c51-94cf-fb17-f9338b32d569" [ 648.793764] env[61356]: _type = "Task" [ 648.793764] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.806697] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52dfec04-2c51-94cf-fb17-f9338b32d569, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.878326] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 648.878474] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 648.878653] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Deleting the datastore file [datastore2] fce0ff0d-d055-422e-ae51-4989abff4b00 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 648.879754] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebc480cf-5b1f-4f05-acd1-56dda82a02a6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.885967] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Waiting for the task: (returnval){ [ 648.885967] env[61356]: value = "task-4302502" [ 648.885967] env[61356]: _type = "Task" [ 648.885967] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.894533] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Task: {'id': task-4302502, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.304856] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 649.305201] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Creating directory with path [datastore2] vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.305385] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a02ed359-5f91-4913-b303-02b196b1fa17 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.317829] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Created directory with path [datastore2] vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.318063] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Fetch image to [datastore2] vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 649.318226] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 649.318979] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738fc6d1-e469-4a17-b0d1-b964cebe26af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.326659] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9adc77-99a9-49d7-96f0-e9fd48342bd8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.336373] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe6b855-95f8-4e97-ab67-ebc87fbf955e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.368413] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ac94b64f-7cc0-46b5-a656-39d10b10d4fc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.375249] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d4608187-8d98-4cad-b88d-94c8af26d097 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.396273] env[61356]: DEBUG oslo_vmware.api [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Task: {'id': task-4302502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072129} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.396534] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 649.396722] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 649.396892] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 649.397075] env[61356]: INFO nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Took 0.62 seconds to destroy the instance on the hypervisor. 
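An aside for readers tracing this failure chain by hand: the sequence above (the RemoteDisconnected warning at 648.249029, the CopyVirtualDisk_Task failing with InvalidArgument on fileType, and the unregister/delete of fce0ff0d-d055-422e-ae51-4989abff4b00) is easier to pull out mechanically than by eye. Below is a minimal sketch for extracting the WARNING/ERROR entries from this log; the regex simply mirrors the "[ ts] env[NNN]: LEVEL logger ..." layout visible throughout, and the helper name failing_entries is my own, not part of Nova or oslo.

import re

# Matches entries as they appear in this log, e.g.
#   [ 648.249029] env[61356]: ERROR oslo_vmware.rw_handles Traceback ...
# The bracketed uptime timestamp, env[...] prefix, level and logger name
# are all visible above; the group names are mine.
ENTRY = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+"
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+(?P<logger>\S+)\s*(?P<rest>.*)"
)

def failing_entries(lines):
    """Yield (timestamp, logger, message) for WARNING/ERROR entries."""
    for line in lines:
        m = ENTRY.match(line)
        if m and m.group("level") in ("WARNING", "ERROR"):
            yield float(m.group("ts")), m.group("logger"), m.group("rest")

Fed this excerpt, it would surface the oslo_vmware.rw_handles warning and traceback lines followed by the nova.compute.manager spawn failure, in timestamp order.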
[ 649.399242] env[61356]: DEBUG nova.compute.claims [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 649.399408] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.399624] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.403817] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 649.464419] env[61356]: DEBUG oslo_vmware.rw_handles [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 649.523205] env[61356]: DEBUG oslo_vmware.rw_handles [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 649.523205] env[61356]: DEBUG oslo_vmware.rw_handles [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 649.853786] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd6111d-5328-4d95-83c5-a2152f2fb052 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.861898] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f029a57-645c-4d75-8cc0-eb1923bead0f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.892565] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e29711-3dc5-493b-ade8-24b00c255400 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.900402] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b13bd97-00a1-4047-b4c0-daa901c3906d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.914602] env[61356]: DEBUG nova.compute.provider_tree [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.923981] env[61356]: DEBUG nova.scheduler.client.report [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.946889] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.547s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.947436] env[61356]: ERROR nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 649.947436] env[61356]: Faults: ['InvalidArgument'] [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Traceback (most recent call last): [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 649.947436] env[61356]: ERROR nova.compute.manager 
[instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self.driver.spawn(context, instance, image_meta, [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self._fetch_image_if_missing(context, vi) [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] image_cache(vi, tmp_image_ds_loc) [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] vm_util.copy_virtual_disk( [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] session._wait_for_task(vmdk_copy_task) [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] return self.wait_for_task(task_ref) [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] return evt.wait() [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] result = hub.switch() [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] return self.greenlet.switch() [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] self.f(*self.args, **self.kw) [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] raise exceptions.translate_fault(task_info.error) [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Faults: ['InvalidArgument'] [ 649.947436] env[61356]: ERROR nova.compute.manager [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] [ 649.948256] env[61356]: DEBUG nova.compute.utils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 649.949684] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Build of instance fce0ff0d-d055-422e-ae51-4989abff4b00 was re-scheduled: A specified parameter was not correct: fileType [ 649.949684] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 649.950068] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 649.950243] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 649.950397] env[61356]: DEBUG nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 649.950553] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.340950] env[61356]: DEBUG nova.network.neutron [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.355055] env[61356]: INFO nova.compute.manager [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] [instance: fce0ff0d-d055-422e-ae51-4989abff4b00] Took 0.40 seconds to deallocate network for instance. [ 650.457937] env[61356]: INFO nova.scheduler.client.report [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Deleted allocations for instance fce0ff0d-d055-422e-ae51-4989abff4b00 [ 650.494147] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e5c6d6a2-1e83-41a8-beb5-da0f7b34a23a tempest-ServerDiagnosticsTest-739240984 tempest-ServerDiagnosticsTest-739240984-project-member] Lock "fce0ff0d-d055-422e-ae51-4989abff4b00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.849s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.520128] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 650.587948] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.588220] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.589798] env[61356]: INFO nova.compute.claims [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.981483] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "04b94227-4b90-4472-a367-39b8c5242558" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.981659] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "04b94227-4b90-4472-a367-39b8c5242558" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.021813] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05766bcc-8303-4bdf-9e46-f312d44d7eaf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.030122] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cfabd4-5773-47b5-b0f6-332a08c43689 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.062408] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466ccafe-b77d-41ea-923e-17f4f444187b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.070755] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096c5d66-f1dc-4479-93b2-608a4d5ed4cf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.084746] env[61356]: DEBUG nova.compute.provider_tree [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Inventory has 
not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.093709] env[61356]: DEBUG nova.scheduler.client.report [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.107023] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.519s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.107513] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 651.140470] env[61356]: DEBUG nova.compute.utils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 651.141823] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 651.141998] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 651.158018] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Start building block device mappings for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 651.202081] env[61356]: DEBUG nova.policy [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ca8b8a9a87448d9a3b736f13b738e95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51b1e97919f44f34b189f11dc6014553', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 651.227500] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 651.258108] env[61356]: DEBUG nova.virt.hardware [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.259669] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97fd0f2-e238-4022-aae2-e9394bb70128 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.268333] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cb9523-c893-4ddd-a415-c3c1c0076df2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.558213] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Successfully created port: f010ad80-afb8-41c7-97ac-2244d816168a {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.217596] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Successfully updated port: f010ad80-afb8-41c7-97ac-2244d816168a {{(pid=61356) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 652.238725] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "refresh_cache-58e76dfc-e2f3-41ec-896a-faadb552c1ac" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.239105] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquired lock "refresh_cache-58e76dfc-e2f3-41ec-896a-faadb552c1ac" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.239525] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 652.284362] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.468675] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Updating instance_info_cache with network_info: [{"id": "f010ad80-afb8-41c7-97ac-2244d816168a", "address": "fa:16:3e:e1:98:25", "network": {"id": "f008c7e8-c8c9-4089-9c8b-a9f7903532f8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1664201566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51b1e97919f44f34b189f11dc6014553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf010ad80-af", "ovs_interfaceid": "f010ad80-afb8-41c7-97ac-2244d816168a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.483285] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Releasing lock "refresh_cache-58e76dfc-e2f3-41ec-896a-faadb552c1ac" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.483586] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance network_info: |[{"id": "f010ad80-afb8-41c7-97ac-2244d816168a", "address": "fa:16:3e:e1:98:25", "network": {"id": "f008c7e8-c8c9-4089-9c8b-a9f7903532f8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1664201566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51b1e97919f44f34b189f11dc6014553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf010ad80-af", "ovs_interfaceid": "f010ad80-afb8-41c7-97ac-2244d816168a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 652.483998] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:98:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f010ad80-afb8-41c7-97ac-2244d816168a', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.493583] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Creating folder: Project (51b1e97919f44f34b189f11dc6014553). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.494337] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-424a9578-55a0-49ee-b2f9-f30d1f1a7ba9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.508355] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Created folder: Project (51b1e97919f44f34b189f11dc6014553) in parent group-v846926. [ 652.508590] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Creating folder: Instances. Parent ref: group-v846968. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 652.508839] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8587c32b-2fc2-4676-b0ab-46dca3ee9f3e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.526163] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Created folder: Instances in parent group-v846968. [ 652.526497] env[61356]: DEBUG oslo.service.loopingcall [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.526717] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 652.527285] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-deec131f-7485-449b-96e3-468dc7d25778 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.551778] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.551778] env[61356]: value = "task-4302505" [ 652.551778] env[61356]: _type = "Task" [ 652.551778] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.560454] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302505, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.065555] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302505, 'name': CreateVM_Task, 'duration_secs': 0.30038} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.065555] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 653.065555] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.065555] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.065555] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.065936] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-871c99a7-4391-4964-9a70-3e2c49faeccc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.071100] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Waiting for the task: (returnval){ [ 653.071100] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]525a138d-6cf7-d232-8f7e-4bbd124813df" [ 653.071100] env[61356]: _type = "Task" [ 653.071100] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.083270] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]525a138d-6cf7-d232-8f7e-4bbd124813df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.135416] env[61356]: DEBUG nova.compute.manager [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Received event network-vif-plugged-f010ad80-afb8-41c7-97ac-2244d816168a {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 653.135511] env[61356]: DEBUG oslo_concurrency.lockutils [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] Acquiring lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.135694] env[61356]: DEBUG oslo_concurrency.lockutils [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.135847] env[61356]: DEBUG oslo_concurrency.lockutils [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.136018] env[61356]: DEBUG nova.compute.manager [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] No waiting events found dispatching network-vif-plugged-f010ad80-afb8-41c7-97ac-2244d816168a {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 653.136191] env[61356]: WARNING nova.compute.manager [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Received unexpected event network-vif-plugged-f010ad80-afb8-41c7-97ac-2244d816168a for instance with vm_state building and task_state spawning. [ 653.136349] env[61356]: DEBUG nova.compute.manager [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Received event network-changed-f010ad80-afb8-41c7-97ac-2244d816168a {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 653.136504] env[61356]: DEBUG nova.compute.manager [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Refreshing instance network info cache due to event network-changed-f010ad80-afb8-41c7-97ac-2244d816168a. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 653.136683] env[61356]: DEBUG oslo_concurrency.lockutils [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] Acquiring lock "refresh_cache-58e76dfc-e2f3-41ec-896a-faadb552c1ac" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.136818] env[61356]: DEBUG oslo_concurrency.lockutils [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] Acquired lock "refresh_cache-58e76dfc-e2f3-41ec-896a-faadb552c1ac" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.136971] env[61356]: DEBUG nova.network.neutron [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Refreshing network info cache for port f010ad80-afb8-41c7-97ac-2244d816168a {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 653.472046] env[61356]: DEBUG nova.network.neutron [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Updated VIF entry in instance network info cache for port f010ad80-afb8-41c7-97ac-2244d816168a. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 653.473654] env[61356]: DEBUG nova.network.neutron [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Updating instance_info_cache with network_info: [{"id": "f010ad80-afb8-41c7-97ac-2244d816168a", "address": "fa:16:3e:e1:98:25", "network": {"id": "f008c7e8-c8c9-4089-9c8b-a9f7903532f8", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1664201566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51b1e97919f44f34b189f11dc6014553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf010ad80-af", "ovs_interfaceid": "f010ad80-afb8-41c7-97ac-2244d816168a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.484460] env[61356]: DEBUG oslo_concurrency.lockutils [req-0e7f04fc-1e5d-4e60-a9e3-2ff984a0d0c7 req-c75691a2-9056-45f8-a5db-91e50f059319 service nova] Releasing lock "refresh_cache-58e76dfc-e2f3-41ec-896a-faadb552c1ac" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.582792] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 
tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.583094] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.583286] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.866746] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "2d2dca63-8ab0-4420-90b0-38a330df8271" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.866746] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.758621] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.758910] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.773443] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.773643] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.773834] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.773993] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 674.775145] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c554d8a-10bf-4b71-97f4-32d61b92410e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.784070] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d11af6-3b4a-416d-a01c-94ac382a711d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.799138] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d17bae-740c-4abb-b71d-6da9f74b2429 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.805487] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c98cb49-e088-4dd5-a8d3-5432ae85edda {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.835672] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180697MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 674.835820] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.836020] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.932008] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.932201] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 6a2bad10-6948-482a-803a-2da7cf733d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.932331] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bcf27023-1eb6-4846-9adc-2235c738cad6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.932454] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 63f196ce-f9ad-4120-a96a-fd6824af893b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.932572] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9ca64659-93b6-4585-a7dc-f9152184144e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.932761] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.932890] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.933018] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.933139] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.933294] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 674.948923] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.959754] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.973680] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 0e648c1e-565d-44cd-a1f5-297b000e42cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.983860] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.993520] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bf420fa-ef3d-4d99-a0a8-379f59cafa1e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.004501] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f78f9fb-8733-42fe-beb0-9329bf154de8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.012877] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 669be123-adf8-4146-a79f-370347e642c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.024686] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.035096] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ea6b8561-5072-4576-991c-034f106af2df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.044602] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance dd4a25f1-b8f0-43a1-8187-56eaddfdd08c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.055125] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bdfedbb4-25a1-4941-afca-f325e489f9e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.065675] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.076053] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 59b0bc79-1c06-441a-a114-5727e3dec5ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.088380] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 35a37ff4-c3a4-404c-a6fa-934d5d2b3846 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.099533] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6df5c9d-0ead-4663-aa53-6b9a325440c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.109579] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f3ce44e-34b4-44b5-b2c9-25e66b715594 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.119626] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ba1b5aa0-1764-4ca3-b771-36ad0d765a24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.130820] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4366b0b5-62ba-4681-9306-d2d9ff903c3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.143755] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 04b94227-4b90-4472-a367-39b8c5242558 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.153777] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.153891] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 675.154040] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 675.505285] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0323f640-ffbf-4d41-bb3e-4f3ca61ff6c4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.512960] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02c1ce7-b365-42c1-85b3-1b546c452c52 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.541748] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68de6341-3323-4dc0-8cf6-0b4d5e11d6cf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.548907] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6494a8f7-23d1-40d7-bf84-ec05b2f147c7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.561574] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.570820] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 675.591455] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 675.591643] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.756s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.591968] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.592246] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.757821] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.758088] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.758182] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.758447] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 677.758447] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 677.778793] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.778985] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779141] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779272] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779408] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Skipping network cache update for instance because it is Building. 
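[editor's note] The _heal_instance_info_cache lines above rebuild the list of candidate instances and skip each one whose vm_state is still Building, which is why the task can end with nothing to do. A tiny sketch of that filter, under assumed instance/data shapes (not Nova's code):

# Minimal sketch: why every candidate can be skipped during a heal pass.
BUILDING = "building"   # assumed vm_state value

def instances_to_heal(instances):
    """Yield instances whose network info cache is safe to refresh."""
    for inst in instances:
        if inst["vm_state"] == BUILDING:
            # Network info is still being allocated; refreshing the cache
            # now would race with the build, so skip it (as logged above).
            continue
        yield inst

instances = [
    {"uuid": "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254", "vm_state": BUILDING},
    {"uuid": "6a2bad10-6948-482a-803a-2da7cf733d49", "vm_state": BUILDING},
]
if not list(instances_to_heal(instances)):
    print("Didn't find any instances for network info cache update.")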
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779535] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779843] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779843] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.779938] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.780015] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 677.780120] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 677.780632] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 677.780783] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... 
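[editor's note] The _reclaim_queued_deletes message above is a config-gated no-op: when the interval option is disabled the task returns immediately. A trivial sketch of that gate, with a plain dict standing in for oslo.config:

# Sketch under assumptions: a periodic task that no-ops when its
# interval option is disabled, mirroring the check logged above.
conf = {"reclaim_instance_interval": 0}   # zero or negative disables it

def reclaim_queued_deletes(conf):
    if conf["reclaim_instance_interval"] <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # ...otherwise soft-deleted instances older than the interval
    # would be reclaimed here...

reclaim_queued_deletes(conf)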
{{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 678.775663] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 695.415968] env[61356]: WARNING oslo_vmware.rw_handles [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 695.415968] env[61356]: ERROR oslo_vmware.rw_handles [ 695.416602] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 695.418103] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 695.418376] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Copying Virtual Disk [datastore2] vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/3ac88021-41d8-4eec-9713-2d1d08aa44d8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 695.418657] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-accc8785-9741-4979-8206-fa66904de297 
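[editor's note] The CopyVirtualDisk_Task invoked above is then driven by oslo.vmware's wait_for_task/_poll_task pair, visible in the next lines as repeated "progress is 0%" polls until the task ends in success or a fault. A generic, self-contained sketch of that polling pattern follows; get_task_info and TaskError are invented stand-ins, not the real API.

# Illustrative poll loop in the spirit of wait_for_task / _poll_task.
import time

class TaskError(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5):
    """Poll a task until it finishes, raising if it ends in error."""
    while True:
        info = get_task_info()           # one status fetch per iteration
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # oslo.vmware translates the fault into an exception here,
            # which is what surfaces as InvalidArgument further down.
            raise TaskError(info["error"])
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)

# Tiny fake task that succeeds on the third poll:
states = iter([{"id": "task-4302506", "state": "running", "progress": 0},
               {"id": "task-4302506", "state": "running", "progress": 50},
               {"id": "task-4302506", "state": "success", "result": "ok"}])
print(wait_for_task(lambda: next(states), interval=0))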
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.426727] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Waiting for the task: (returnval){ [ 695.426727] env[61356]: value = "task-4302506" [ 695.426727] env[61356]: _type = "Task" [ 695.426727] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.434758] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Task: {'id': task-4302506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.938069] env[61356]: DEBUG oslo_vmware.exceptions [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 695.938069] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.938069] env[61356]: ERROR nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 695.938069] env[61356]: Faults: ['InvalidArgument'] [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Traceback (most recent call last): [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] yield resources [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self.driver.spawn(context, instance, image_meta, [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: 
bcf27023-1eb6-4846-9adc-2235c738cad6] self._fetch_image_if_missing(context, vi) [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] image_cache(vi, tmp_image_ds_loc) [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] vm_util.copy_virtual_disk( [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] session._wait_for_task(vmdk_copy_task) [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] return self.wait_for_task(task_ref) [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] return evt.wait() [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] result = hub.switch() [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] return self.greenlet.switch() [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self.f(*self.args, **self.kw) [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] raise exceptions.translate_fault(task_info.error) [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Faults: ['InvalidArgument'] [ 695.938069] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] [ 695.939242] env[61356]: INFO nova.compute.manager [None 
req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Terminating instance [ 695.939951] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.940179] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.940415] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0a6560d-4231-4748-b62b-e0061c6d986a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.943730] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 695.944047] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 695.945917] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ca669e-401b-49b3-bc47-e140e2e775ec {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.958415] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 695.959487] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4694dc5d-e214-4d14-9c2c-3f3ebbd8b3bb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.961055] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.961238] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Folder [datastore2] devstack-image-cache_base created. 
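[editor's note] The earlier "Fault InvalidArgument not matched" DEBUG line comes from oslo.vmware's get_fault_class fallback: the library tries to map the fault name to a specific exception class and, failing that, raises the generic VimFaultException seen in the traceback. A hedged sketch of that lookup shape (not oslo.vmware's actual registry):

# Sketch: fault-name -> exception-class mapping with a generic fallback.
class VimFaultException(Exception):
    def __init__(self, faults, message):
        super().__init__(message)
        self.faults = faults

class FileNotFoundFault(VimFaultException): ...
class DuplicateNameFault(VimFaultException): ...

_FAULT_CLASSES = {
    "FileNotFound": FileNotFoundFault,
    "DuplicateName": DuplicateNameFault,
    # "InvalidArgument" deliberately absent -> falls back to the base class
}

def get_fault_class(name):
    cls = _FAULT_CLASSES.get(name)
    if cls is None:
        print(f"Fault {name} not matched.")   # same DEBUG line as above
        cls = VimFaultException
    return cls

exc = get_fault_class("InvalidArgument")(
    ["InvalidArgument"], "A specified parameter was not correct: fileType")
print(type(exc).__name__, exc.faults)   # VimFaultException ['InvalidArgument']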
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 695.961903] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ddf0b7b-0998-42d6-ad08-d7823ee75277 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.967441] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 695.967441] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521c8c7a-dd0e-362e-ddf8-9643d34938b3" [ 695.967441] env[61356]: _type = "Task" [ 695.967441] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.974711] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521c8c7a-dd0e-362e-ddf8-9643d34938b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.048027] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 696.048027] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 696.048027] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Deleting the datastore file [datastore2] bcf27023-1eb6-4846-9adc-2235c738cad6 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.048027] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-908c1194-02f4-49fe-b7ea-d39c7f3f9c78 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.053997] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Waiting for the task: (returnval){ [ 696.053997] env[61356]: value = "task-4302508" [ 696.053997] env[61356]: _type = "Task" [ 696.053997] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.061636] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Task: {'id': task-4302508, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.477533] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 696.477853] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating directory with path [datastore2] vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.478042] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8453a5a0-f71b-4db2-907e-30a371d9b503 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.489603] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Created directory with path [datastore2] vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.489805] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Fetch image to [datastore2] vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 696.489980] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 696.490716] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc8ffb7-fe29-42c3-aa90-7b724ab683b9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.497614] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14113eca-7291-43a9-a1c8-3e4508572a14 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.506055] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6407181-b071-4411-b563-70b7b2dbdccd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.539244] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4164d0d1-5c58-46de-bed0-57fee6e14d40 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.542422] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d1e317f7-7b5e-4ba4-8f53-3e25e1c64484 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.562024] env[61356]: DEBUG oslo_vmware.api [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Task: {'id': task-4302508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078555} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.563311] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.563499] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 696.563669] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 696.563839] env[61356]: INFO nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Took 0.62 seconds to destroy the instance on the hypervisor. 
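[editor's note] Every datastore operation above names its target in the "[datastore2] dir/file" form (the image cache folder, the per-instance directory being deleted, the temp vmdk paths). A small invented helper showing how such paths compose; it is inspired by, but is not, nova's ds_util.DatastorePath:

# Invented helper: the "[datastore] relative/path" form used in these logs.
import posixpath

class DatastorePath:
    def __init__(self, datastore, *parts):
        self.datastore = datastore
        self.rel_path = posixpath.join(*parts) if parts else ""

    def join(self, *parts):
        return DatastorePath(self.datastore, self.rel_path, *parts)

    def __str__(self):
        return f"[{self.datastore}] {self.rel_path}".rstrip()

cache = DatastorePath("datastore2", "devstack-image-cache_base")
vmdk = cache.join("88d3b9c3-6614-40fd-b1c9-50fb806e539a",
                  "88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk")
print(vmdk)   # [datastore2] devstack-image-cache_base/88d3.../88d3....vmdk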
[ 696.565621] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 696.568615] env[61356]: DEBUG nova.compute.claims [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 696.568615] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.568615] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.619743] env[61356]: DEBUG oslo_vmware.rw_handles [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 696.680838] env[61356]: DEBUG oslo_vmware.rw_handles [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 696.680838] env[61356]: DEBUG oslo_vmware.rw_handles [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
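[editor's note] The rw_handles lines above open an HTTPS write connection to the host's /folder endpoint with a fixed Content-Length (21318656 bytes here) and stream the image into the datastore; the earlier RemoteDisconnected warning is the other side of this mechanism, raised when the server drops the socket before answering on close. A hedged, definition-only sketch of that write-handle idea; the URL layout, ticket header, and lack of chunking are assumptions, not oslo.vmware's exact code.

# Sketch of streaming bytes to a datastore /folder URL over HTTPS.
import http.client
import urllib.parse

def upload_file(url, data: bytes, ticket: str):
    parsed = urllib.parse.urlsplit(url)
    conn = http.client.HTTPSConnection(parsed.hostname, parsed.port or 443)
    path = parsed.path + ("?" + parsed.query if parsed.query else "")
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(len(data)))  # size logged above
    conn.putheader("Cookie", ticket)   # generic service ticket (assumed)
    conn.endheaders()
    conn.send(data)
    # The RemoteDisconnected warning earlier comes from this step: the
    # server may close the socket before sending a response.
    resp = conn.getresponse()
    conn.close()
    return resp.status
# Definition only: calling it needs a reachable datastore endpoint.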
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 697.053740] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e70e30-7da8-4f88-81b3-75f01ec360da {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.061159] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8061bc97-dcda-442a-9579-296af81dd96a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.089399] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d939fd-b303-4d6a-97dc-9800673d2896 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.096087] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14519890-f170-4820-b7fe-b191374767fa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.108546] env[61356]: DEBUG nova.compute.provider_tree [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.116807] env[61356]: DEBUG nova.scheduler.client.report [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.134019] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.564s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.134019] env[61356]: ERROR nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 697.134019] env[61356]: Faults: ['InvalidArgument'] [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Traceback (most recent call last): [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self.driver.spawn(context, instance, image_meta, [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self._fetch_image_if_missing(context, vi) [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] image_cache(vi, tmp_image_ds_loc) [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] vm_util.copy_virtual_disk( [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] session._wait_for_task(vmdk_copy_task) [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] return self.wait_for_task(task_ref) [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] return evt.wait() [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] result = hub.switch() [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] return self.greenlet.switch() [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] self.f(*self.args, **self.kw) [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] raise exceptions.translate_fault(task_info.error) [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Faults: ['InvalidArgument'] [ 697.134019] env[61356]: ERROR nova.compute.manager [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] [ 697.134019] env[61356]: DEBUG nova.compute.utils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.134969] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Build of instance bcf27023-1eb6-4846-9adc-2235c738cad6 was re-scheduled: A specified parameter was not correct: fileType [ 697.134969] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 697.135157] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 697.135328] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 697.135690] env[61356]: DEBUG nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 697.135690] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 697.635779] env[61356]: DEBUG nova.network.neutron [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.646270] env[61356]: INFO nova.compute.manager [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: bcf27023-1eb6-4846-9adc-2235c738cad6] Took 0.51 seconds to deallocate network for instance. [ 697.759050] env[61356]: INFO nova.scheduler.client.report [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Deleted allocations for instance bcf27023-1eb6-4846-9adc-2235c738cad6 [ 697.785030] env[61356]: DEBUG oslo_concurrency.lockutils [None req-128d5481-8d44-46bb-b4cc-bf4237979520 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "bcf27023-1eb6-4846-9adc-2235c738cad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.201s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.799590] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Starting instance... 
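[editor's note] The lock line above ("held 152.201s") shows the per-instance build lock covering the whole failed build, with oslo.concurrency's lockutils wrapper printing how long each caller waited for and held the lock. A stand-in sketch of that timing pattern using a plain threading.Lock rather than the real fair/external locks:

# Assumed-name sketch of the waited/held timing printed in these logs.
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, by):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

with timed_lock("compute_resources", "instance_claim"):
    time.sleep(0.01)   # the 152.201s above was one long build holding its lock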
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 697.856123] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.856383] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.858999] env[61356]: INFO nova.compute.claims [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.247288] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cd173a-91af-4623-bd1a-a32c27ecd4a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.254904] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5503366e-99ac-4310-b333-d1c852716859 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.284021] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077d2dd0-95ac-4403-8be7-192f51ee528d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.290123] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4841066-c5fb-4c34-8d77-3e40a3891242 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.304252] env[61356]: DEBUG nova.compute.provider_tree [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.314065] env[61356]: DEBUG nova.scheduler.client.report [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 698.327397] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 
tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.471s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.342633] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "5b58cf8e-3ad3-4dcd-8fbc-c64c3c77b3ee" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.342876] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "5b58cf8e-3ad3-4dcd-8fbc-c64c3c77b3ee" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.348186] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "5b58cf8e-3ad3-4dcd-8fbc-c64c3c77b3ee" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.005s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.348629] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 698.381215] env[61356]: DEBUG nova.compute.utils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.381789] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 698.381963] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 698.390901] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Start building block device mappings for instance. 
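[editor's note] The brief lock around _validate_instance_group_policy._do_validation above (held 0.005s) re-checks the server group's policy on the chosen host at claim time. A sketch of the core check, with names and data shapes assumed (not Nova's code):

# Sketch: does placing this instance on this_host satisfy the group policy?
def validate_group_policy(policy, members_hosts, this_host):
    if policy == "anti-affinity":
        # No other group member may already run on this host.
        return this_host not in members_hosts
    if policy == "affinity":
        # All members must share a host (or the group is still empty).
        return not members_hosts or this_host in members_hosts
    return True   # soft policies are weighed by the scheduler instead

print(validate_group_policy("anti-affinity", {"cpu-2"}, "cpu-1"))  # True
print(validate_group_policy("anti-affinity", {"cpu-1"}, "cpu-1"))  # False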
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 698.448904] env[61356]: DEBUG nova.policy [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07b5927a6dbe4aa08f432afd214b91eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e63e0f0ca8c0442299eb42826987be07', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 698.454581] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 698.480946] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 698.481203] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 698.481360] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.481545] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 698.481693] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.481839] env[61356]: DEBUG nova.virt.hardware [None 
req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 698.482077] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 698.482244] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 698.482484] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 698.482605] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 698.482785] env[61356]: DEBUG nova.virt.hardware [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.483688] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e9242f-9f7e-4a03-b271-551a1499f4f7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.491685] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a792f9-72ef-45ca-b9e9-6b909174a216 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.794111] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Successfully created port: b3fabb72-7dfc-4d3b-9270-fb1d039dde7a {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 699.457228] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Successfully updated port: b3fabb72-7dfc-4d3b-9270-fb1d039dde7a {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.470262] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 
tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "refresh_cache-d1887728-4538-4639-a3e7-db235a2d4765" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.470448] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquired lock "refresh_cache-d1887728-4538-4639-a3e7-db235a2d4765" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.471647] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 699.522445] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.622882] env[61356]: DEBUG oslo_concurrency.lockutils [None req-455efbe1-2995-4bd4-9dd7-1584e0087a32 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Acquiring lock "a24ec07d-7e22-4f7e-9e69-9e5f60f505d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.623508] env[61356]: DEBUG oslo_concurrency.lockutils [None req-455efbe1-2995-4bd4-9dd7-1584e0087a32 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "a24ec07d-7e22-4f7e-9e69-9e5f60f505d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.724860] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Updating instance_info_cache with network_info: [{"id": "b3fabb72-7dfc-4d3b-9270-fb1d039dde7a", "address": "fa:16:3e:3a:01:d5", "network": {"id": "48bb52e4-82b2-4640-b9d8-aa374398f327", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-893914480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e63e0f0ca8c0442299eb42826987be07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3fabb72-7d", 
"ovs_interfaceid": "b3fabb72-7dfc-4d3b-9270-fb1d039dde7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.751702] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Releasing lock "refresh_cache-d1887728-4538-4639-a3e7-db235a2d4765" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.752145] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance network_info: |[{"id": "b3fabb72-7dfc-4d3b-9270-fb1d039dde7a", "address": "fa:16:3e:3a:01:d5", "network": {"id": "48bb52e4-82b2-4640-b9d8-aa374398f327", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-893914480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e63e0f0ca8c0442299eb42826987be07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3fabb72-7d", "ovs_interfaceid": "b3fabb72-7dfc-4d3b-9270-fb1d039dde7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.753065] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:01:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3fabb72-7dfc-4d3b-9270-fb1d039dde7a', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 699.761491] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Creating folder: Project (e63e0f0ca8c0442299eb42826987be07). Parent ref: group-v846926. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 699.762229] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ec173bd-561e-4cde-a8b3-58c18faad850 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.773614] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Created folder: Project (e63e0f0ca8c0442299eb42826987be07) in parent group-v846926. [ 699.773745] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Creating folder: Instances. Parent ref: group-v846971. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 699.773985] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a02b10b9-9624-4644-abf2-a16157806e07 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.782800] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Created folder: Instances in parent group-v846971. [ 699.783071] env[61356]: DEBUG oslo.service.loopingcall [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.783251] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 699.783453] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-719d4289-6b14-4200-b18e-a5be8fcb2ef1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.803809] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 699.803809] env[61356]: value = "task-4302511" [ 699.803809] env[61356]: _type = "Task" [ 699.803809] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.816892] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302511, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.222254] env[61356]: DEBUG nova.compute.manager [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Received event network-vif-plugged-b3fabb72-7dfc-4d3b-9270-fb1d039dde7a {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 700.222521] env[61356]: DEBUG oslo_concurrency.lockutils [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] Acquiring lock "d1887728-4538-4639-a3e7-db235a2d4765-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.222666] env[61356]: DEBUG oslo_concurrency.lockutils [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] Lock "d1887728-4538-4639-a3e7-db235a2d4765-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.222832] env[61356]: DEBUG oslo_concurrency.lockutils [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] Lock "d1887728-4538-4639-a3e7-db235a2d4765-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.223015] env[61356]: DEBUG nova.compute.manager [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] No waiting events found dispatching network-vif-plugged-b3fabb72-7dfc-4d3b-9270-fb1d039dde7a {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 700.223217] env[61356]: WARNING nova.compute.manager [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Received unexpected event network-vif-plugged-b3fabb72-7dfc-4d3b-9270-fb1d039dde7a for instance with vm_state building and task_state spawning. [ 700.223367] env[61356]: DEBUG nova.compute.manager [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Received event network-changed-b3fabb72-7dfc-4d3b-9270-fb1d039dde7a {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 700.223520] env[61356]: DEBUG nova.compute.manager [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Refreshing instance network info cache due to event network-changed-b3fabb72-7dfc-4d3b-9270-fb1d039dde7a. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 700.223699] env[61356]: DEBUG oslo_concurrency.lockutils [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] Acquiring lock "refresh_cache-d1887728-4538-4639-a3e7-db235a2d4765" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.223895] env[61356]: DEBUG oslo_concurrency.lockutils [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] Acquired lock "refresh_cache-d1887728-4538-4639-a3e7-db235a2d4765" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.224032] env[61356]: DEBUG nova.network.neutron [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Refreshing network info cache for port b3fabb72-7dfc-4d3b-9270-fb1d039dde7a {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 700.313986] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302511, 'name': CreateVM_Task, 'duration_secs': 0.293746} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.315670] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 700.315670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.315670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.315670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 700.315670] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0637f18f-3389-4d65-8192-a48eeb0131a1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.320159] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Waiting for the task: (returnval){ [ 700.320159] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f5df1a-1a14-2db9-a285-8d465fab6005" [ 700.320159] env[61356]: _type = "Task" [ 700.320159] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.327325] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f5df1a-1a14-2db9-a285-8d465fab6005, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.510207] env[61356]: DEBUG nova.network.neutron [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Updated VIF entry in instance network info cache for port b3fabb72-7dfc-4d3b-9270-fb1d039dde7a. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 700.510575] env[61356]: DEBUG nova.network.neutron [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Updating instance_info_cache with network_info: [{"id": "b3fabb72-7dfc-4d3b-9270-fb1d039dde7a", "address": "fa:16:3e:3a:01:d5", "network": {"id": "48bb52e4-82b2-4640-b9d8-aa374398f327", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-893914480-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e63e0f0ca8c0442299eb42826987be07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3fabb72-7d", "ovs_interfaceid": "b3fabb72-7dfc-4d3b-9270-fb1d039dde7a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.522160] env[61356]: DEBUG oslo_concurrency.lockutils [req-f4a335f8-d29e-4ebc-9896-7d4e6afafb47 req-bbecd610-ef70-4d43-94f6-ef4e018dc751 service nova] Releasing lock "refresh_cache-d1887728-4538-4639-a3e7-db235a2d4765" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.830336] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.830575] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} 
[ 700.830784] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.759622] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.772979] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.772979] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.772979] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.772979] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 734.772979] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f306fb-3ee8-48aa-b16f-7d40fc7f527c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.781988] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82494ac5-f744-425a-915f-d65a7b2c5cc7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.796683] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeea1230-0907-4b8c-bf9c-22d0a6973734 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.803566] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403ce879-eaa5-4331-999f-57ac63e0247e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.835171] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180700MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 734.835393] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.835683] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.913403] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913403] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 6a2bad10-6948-482a-803a-2da7cf733d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913403] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 63f196ce-f9ad-4120-a96a-fd6824af893b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913570] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9ca64659-93b6-4585-a7dc-f9152184144e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913603] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913728] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913829] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.913942] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.914070] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.914188] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 734.926114] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.936732] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 0e648c1e-565d-44cd-a1f5-297b000e42cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.946861] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.958032] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bf420fa-ef3d-4d99-a0a8-379f59cafa1e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.967301] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f78f9fb-8733-42fe-beb0-9329bf154de8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.976859] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 669be123-adf8-4146-a79f-370347e642c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.986679] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.997466] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ea6b8561-5072-4576-991c-034f106af2df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.006765] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance dd4a25f1-b8f0-43a1-8187-56eaddfdd08c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.015910] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bdfedbb4-25a1-4941-afca-f325e489f9e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.025382] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.035058] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 59b0bc79-1c06-441a-a114-5727e3dec5ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.045265] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 35a37ff4-c3a4-404c-a6fa-934d5d2b3846 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.056812] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6df5c9d-0ead-4663-aa53-6b9a325440c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.067966] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f3ce44e-34b4-44b5-b2c9-25e66b715594 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.078251] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ba1b5aa0-1764-4ca3-b771-36ad0d765a24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.088689] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4366b0b5-62ba-4681-9306-d2d9ff903c3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.098595] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 04b94227-4b90-4472-a367-39b8c5242558 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.108161] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.117645] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance a24ec07d-7e22-4f7e-9e69-9e5f60f505d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.117880] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 735.118045] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 735.474008] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e3a291-2c85-4721-b492-5f7b4c1430b2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.481436] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b7e9c2-832b-45b6-8302-ad3c4e1b3312 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.510719] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b4fcb0-b893-4337-8347-afed43d5f854 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.518023] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6039f42-28fc-453a-9728-06729b1f406c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.530714] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.538843] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 735.552012] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 735.552156] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.716s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.552774] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.552774] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.758046] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.758296] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.758390] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.754053] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.757788] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.758067] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 739.758145] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 739.780048] env[61356]: DEBUG nova.compute.manager [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.780223] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.780493] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.780493] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.780598] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.780704] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.780817] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.781009] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.781063] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.781170] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 739.781310] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 739.782060] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.782168] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 740.778588] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.213685] env[61356]: DEBUG oslo_concurrency.lockutils [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.504910] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "6a2bad10-6948-482a-803a-2da7cf733d49" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.853030] env[61356]: WARNING oslo_vmware.rw_handles [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 744.853030] env[61356]: ERROR oslo_vmware.rw_handles [ 744.853638] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 
ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 744.855160] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 744.855438] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Copying Virtual Disk [datastore2] vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/ba849d65-56c8-431d-bc9d-0ff6b97677de/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 744.855936] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21362cfc-fabb-4ac5-aa58-4d0f8b0fa886 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.865017] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 744.865017] env[61356]: value = "task-4302512" [ 744.865017] env[61356]: _type = "Task" [ 744.865017] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.874596] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': task-4302512, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.375226] env[61356]: DEBUG oslo_vmware.exceptions [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 745.375620] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.376187] env[61356]: ERROR nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 745.376187] env[61356]: Faults: ['InvalidArgument'] [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Traceback (most recent call last): [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] yield resources [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self.driver.spawn(context, instance, image_meta, [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self._fetch_image_if_missing(context, vi) [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] image_cache(vi, tmp_image_ds_loc) [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] vm_util.copy_virtual_disk( [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] session._wait_for_task(vmdk_copy_task) [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] return self.wait_for_task(task_ref) [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] return evt.wait() [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] result = hub.switch() [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] return self.greenlet.switch() [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self.f(*self.args, **self.kw) [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] raise exceptions.translate_fault(task_info.error) [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Faults: ['InvalidArgument'] [ 745.376187] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] [ 745.376996] env[61356]: INFO nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Terminating instance [ 745.378325] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.378325] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.378968] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Start destroying the instance 
on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 745.379181] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 745.379407] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09871c5c-19c4-4ee6-8f30-3832cde4aa95 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.381676] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e60c5ce-ce09-4c3b-bc3d-c3377ec3e517 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.388693] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 745.389682] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5304bd3-5077-4c99-9846-e232b84509cf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.391045] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.391221] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 745.391921] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7a8a1fd-1ea2-4197-8a5e-26cef580d521 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.396731] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 745.396731] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]524a9713-34ca-df38-0cdc-1e290fbbdb10" [ 745.396731] env[61356]: _type = "Task" [ 745.396731] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.404448] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]524a9713-34ca-df38-0cdc-1e290fbbdb10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.460358] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 745.460604] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 745.460782] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Deleting the datastore file [datastore2] ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 745.461055] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29d18bb7-085a-48ef-adfb-243e947a3bd2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.467213] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 745.467213] env[61356]: value = "task-4302514" [ 745.467213] env[61356]: _type = "Task" [ 745.467213] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.474655] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': task-4302514, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.907359] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 745.907662] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating directory with path [datastore2] vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.907858] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a58f9b41-4fb3-4da5-8eb0-9b38721e1515 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.920136] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Created directory with path [datastore2] vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.920347] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Fetch image to [datastore2] vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 745.920552] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 745.921351] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5258ca19-19f2-4138-882f-5a8d35292958 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.927918] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191cace4-7e0b-4516-aee6-14e8a1375c8b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.938347] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837a98e3-f026-4ba7-bf67-41a63bca2d5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.967911] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1de800-4b5f-4ec9-861e-28a7b100c8ce {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.978532] env[61356]: DEBUG oslo_vmware.api [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': task-4302514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076491} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.979049] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 745.979243] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 745.979415] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 745.979592] env[61356]: INFO nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 745.981776] env[61356]: DEBUG nova.compute.claims [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 745.981944] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.982175] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.985131] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-18591293-49b2-496f-8e1b-98cde860fa0f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.006490] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 746.061744] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 746.121135] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 746.121352] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 746.408590] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f502c6ff-24e4-49b6-9cdc-0dc89bf5e784 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.416076] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9222eb2-7a5a-46c5-9249-6650ab5de01e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.445536] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1213283-0046-4e2c-acac-cb5294fc6ebf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.452431] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34e0f9a-7ad3-44ee-b043-fcd2a8c5b781 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.465420] env[61356]: DEBUG nova.compute.provider_tree [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.473592] env[61356]: DEBUG nova.scheduler.client.report [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 746.487734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.505s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.488301] env[61356]: ERROR nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 746.488301] env[61356]: Faults: ['InvalidArgument'] [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Traceback (most recent call last): [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: 
ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self.driver.spawn(context, instance, image_meta, [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self._vmops.spawn(context, instance, image_meta, injected_files, [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self._fetch_image_if_missing(context, vi) [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] image_cache(vi, tmp_image_ds_loc) [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] vm_util.copy_virtual_disk( [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] session._wait_for_task(vmdk_copy_task) [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] return self.wait_for_task(task_ref) [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] return evt.wait() [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] result = hub.switch() [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] return self.greenlet.switch() [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] self.f(*self.args, **self.kw) [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] raise exceptions.translate_fault(task_info.error) [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Faults: ['InvalidArgument'] [ 746.488301] env[61356]: ERROR nova.compute.manager [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] [ 746.489088] env[61356]: DEBUG nova.compute.utils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 746.490328] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Build of instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 was re-scheduled: A specified parameter was not correct: fileType [ 746.490328] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 746.490704] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 746.490870] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 746.491041] env[61356]: DEBUG nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 746.491224] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 746.865963] env[61356]: DEBUG nova.network.neutron [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.882958] env[61356]: INFO nova.compute.manager [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Took 0.39 seconds to deallocate network for instance. [ 746.994644] env[61356]: INFO nova.scheduler.client.report [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Deleted allocations for instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 [ 747.016558] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28eb12a0-d76d-422c-92af-58684d28c0e7 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.095s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.017709] env[61356]: DEBUG oslo_concurrency.lockutils [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.804s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.017978] env[61356]: DEBUG oslo_concurrency.lockutils [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.018149] env[61356]: DEBUG oslo_concurrency.lockutils [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.018316] env[61356]: DEBUG oslo_concurrency.lockutils [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.024031] env[61356]: INFO nova.compute.manager [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Terminating instance [ 747.026692] env[61356]: DEBUG nova.compute.manager [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 747.026878] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 747.027163] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-412254f4-ec26-4c53-b76e-48113ab0c498 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.031280] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 747.038561] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b944ec-8ad6-4244-850e-62b61628c390 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.067719] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef6025e6-dbaf-4cc1-a068-5e2ca1c40254 could not be found. [ 747.067933] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 747.068558] env[61356]: INFO nova.compute.manager [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 747.068558] env[61356]: DEBUG oslo.service.loopingcall [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.068729] env[61356]: DEBUG nova.compute.manager [-] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 747.068729] env[61356]: DEBUG nova.network.neutron [-] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 747.091819] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.092182] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.093913] env[61356]: INFO nova.compute.claims [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.109168] env[61356]: DEBUG nova.network.neutron [-] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.120280] env[61356]: INFO nova.compute.manager [-] [instance: ef6025e6-dbaf-4cc1-a068-5e2ca1c40254] Took 0.05 seconds to deallocate network for instance. 
[ 747.254878] env[61356]: DEBUG oslo_concurrency.lockutils [None req-814d2b30-d099-44a6-8cf0-ca187d38f760 tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "ef6025e6-dbaf-4cc1-a068-5e2ca1c40254" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.237s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.506235] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbde4ae2-16bd-4111-a0be-259e9ac6d07e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.513897] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50761d73-4668-430e-9c33-0915efb1a6a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.542336] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e44a76-3a20-4e8c-a80b-d2e46b39f67e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.549108] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4111caf3-f6e2-4fa9-ab14-658edf9f509e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.561539] env[61356]: DEBUG nova.compute.provider_tree [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.570398] env[61356]: DEBUG nova.scheduler.client.report [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.585179] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.493s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.585759] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 747.622851] env[61356]: DEBUG nova.compute.utils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.626502] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 747.626690] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 747.636612] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 747.689068] env[61356]: DEBUG nova.policy [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '877bb82afe8d42ae8f54d1e963c0244b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a8faf03438643ad85e91c93c88ce86d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 747.699184] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 747.725394] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.726248] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.726248] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.726248] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.726248] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.726418] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.726690] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.726916] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.727131] env[61356]: DEBUG nova.virt.hardware [None 
req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.727709] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.727709] env[61356]: DEBUG nova.virt.hardware [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.728410] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa7ff3a-669b-4901-b95f-e2025cacf438 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.736894] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a387a4c1-bc99-4f11-9226-d7443c6d9d93 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.090560] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Successfully created port: 463e0fe7-b31f-4230-ace6-21906ad2fda0 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.696112] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Successfully updated port: 463e0fe7-b31f-4230-ace6-21906ad2fda0 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.716972] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "refresh_cache-2fb236e9-1c31-4bb9-91e8-cbe367e0a989" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.717130] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "refresh_cache-2fb236e9-1c31-4bb9-91e8-cbe367e0a989" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.717279] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.775300] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 
tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.954636] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Updating instance_info_cache with network_info: [{"id": "463e0fe7-b31f-4230-ace6-21906ad2fda0", "address": "fa:16:3e:c1:db:2f", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap463e0fe7-b3", "ovs_interfaceid": "463e0fe7-b31f-4230-ace6-21906ad2fda0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.971973] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "refresh_cache-2fb236e9-1c31-4bb9-91e8-cbe367e0a989" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.972339] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance network_info: |[{"id": "463e0fe7-b31f-4230-ace6-21906ad2fda0", "address": "fa:16:3e:c1:db:2f", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap463e0fe7-b3", "ovs_interfaceid": "463e0fe7-b31f-4230-ace6-21906ad2fda0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 748.972937] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:db:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '463e0fe7-b31f-4230-ace6-21906ad2fda0', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.980552] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating folder: Project (8a8faf03438643ad85e91c93c88ce86d). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.981076] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0596490b-944e-4d08-9620-a836665771f5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.991268] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created folder: Project (8a8faf03438643ad85e91c93c88ce86d) in parent group-v846926. [ 748.991520] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating folder: Instances. Parent ref: group-v846974. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 748.991712] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4a9ede0-5c9f-42ff-b4cb-49ff48ca2bb6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.000994] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created folder: Instances in parent group-v846974. [ 749.001240] env[61356]: DEBUG oslo.service.loopingcall [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 749.001476] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 749.001711] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99df9102-d9ba-4bf8-85d8-d1f88c38659a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.020763] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "22e72346-49c6-4044-ae29-96679d4f2253" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.020989] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "22e72346-49c6-4044-ae29-96679d4f2253" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.024409] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.024409] env[61356]: value = "task-4302517" [ 749.024409] env[61356]: _type = "Task" [ 749.024409] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.032067] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302517, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.534484] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302517, 'name': CreateVM_Task, 'duration_secs': 0.31851} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.534760] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 749.535340] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.535504] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.535848] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 749.536103] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85795db8-e033-41b4-ba73-4d38988317b2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.540610] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 749.540610] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5246d65e-9450-6434-33fb-47e1269e286f" [ 749.540610] env[61356]: _type = "Task" [ 749.540610] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.547800] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5246d65e-9450-6434-33fb-47e1269e286f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.633266] env[61356]: DEBUG nova.compute.manager [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Received event network-vif-plugged-463e0fe7-b31f-4230-ace6-21906ad2fda0 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 749.633266] env[61356]: DEBUG oslo_concurrency.lockutils [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] Acquiring lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.633266] env[61356]: DEBUG oslo_concurrency.lockutils [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.633427] env[61356]: DEBUG oslo_concurrency.lockutils [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.633604] env[61356]: DEBUG nova.compute.manager [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] No waiting events found dispatching network-vif-plugged-463e0fe7-b31f-4230-ace6-21906ad2fda0 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 749.633771] env[61356]: WARNING nova.compute.manager [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Received unexpected event network-vif-plugged-463e0fe7-b31f-4230-ace6-21906ad2fda0 for instance with vm_state building and task_state spawning. [ 749.633926] env[61356]: DEBUG nova.compute.manager [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Received event network-changed-463e0fe7-b31f-4230-ace6-21906ad2fda0 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 749.634088] env[61356]: DEBUG nova.compute.manager [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Refreshing instance network info cache due to event network-changed-463e0fe7-b31f-4230-ace6-21906ad2fda0. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 749.634263] env[61356]: DEBUG oslo_concurrency.lockutils [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] Acquiring lock "refresh_cache-2fb236e9-1c31-4bb9-91e8-cbe367e0a989" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.634391] env[61356]: DEBUG oslo_concurrency.lockutils [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] Acquired lock "refresh_cache-2fb236e9-1c31-4bb9-91e8-cbe367e0a989" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.634606] env[61356]: DEBUG nova.network.neutron [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Refreshing network info cache for port 463e0fe7-b31f-4230-ace6-21906ad2fda0 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 750.053010] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.053279] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.053557] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.093295] env[61356]: DEBUG nova.network.neutron [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Updated VIF entry in instance network info cache for port 463e0fe7-b31f-4230-ace6-21906ad2fda0. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 750.093716] env[61356]: DEBUG nova.network.neutron [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Updating instance_info_cache with network_info: [{"id": "463e0fe7-b31f-4230-ace6-21906ad2fda0", "address": "fa:16:3e:c1:db:2f", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap463e0fe7-b3", "ovs_interfaceid": "463e0fe7-b31f-4230-ace6-21906ad2fda0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.104698] env[61356]: DEBUG oslo_concurrency.lockutils [req-6d18cc38-aa17-4af5-8ca7-16348a45027a req-c0a46641-92b5-476c-a40e-d3ce0e19ee58 service nova] Releasing lock "refresh_cache-2fb236e9-1c31-4bb9-91e8-cbe367e0a989" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.346742] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "63f196ce-f9ad-4120-a96a-fd6824af893b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.198399] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "9ca64659-93b6-4585-a7dc-f9152184144e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.591049] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.707154] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "eed82b3f-6590-4677-a4a2-680d769400fd" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.394313] env[61356]: DEBUG oslo_concurrency.lockutils [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.444364] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.665694] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.445832] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "d1887728-4538-4639-a3e7-db235a2d4765" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.283793] env[61356]: DEBUG oslo_concurrency.lockutils [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.955940] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.956227] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.760276] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task 
ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 794.785017] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.785017] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.785017] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.785017] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 794.785017] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa440371-b1c6-45a8-9b87-9cef06cf8c8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.795011] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed15371c-683e-49a7-b8bc-8af5a38bb7dd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.812334] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222c3c10-f485-4884-a149-a2e054b82ebf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.818924] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93bcbe6-550f-4ab1-bc12-52778e0fc2d3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.847704] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180698MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 794.847870] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.849268] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 
0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.872315] env[61356]: WARNING oslo_vmware.rw_handles [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 794.872315] env[61356]: ERROR oslo_vmware.rw_handles [ 794.872315] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 794.874863] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 794.874863] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Copying Virtual Disk [datastore2] vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/4f6eef7c-dfe6-4959-bb88-5f16bbebbd01/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 794.875154] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a97f7e4e-965e-49ad-a498-16925915c8f9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.883553] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 794.883553] env[61356]: value = "task-4302518" [ 
794.883553] env[61356]: _type = "Task" [ 794.883553] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.893163] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': task-4302518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.935329] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 6a2bad10-6948-482a-803a-2da7cf733d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.935535] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 63f196ce-f9ad-4120-a96a-fd6824af893b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936392] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9ca64659-93b6-4585-a7dc-f9152184144e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936392] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936392] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936392] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936392] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936392] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936645] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.936645] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 794.949450] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 669be123-adf8-4146-a79f-370347e642c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.960851] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.971576] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ea6b8561-5072-4576-991c-034f106af2df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.982653] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance dd4a25f1-b8f0-43a1-8187-56eaddfdd08c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.993556] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bdfedbb4-25a1-4941-afca-f325e489f9e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.004883] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.016668] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 59b0bc79-1c06-441a-a114-5727e3dec5ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.042271] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 35a37ff4-c3a4-404c-a6fa-934d5d2b3846 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.053807] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6df5c9d-0ead-4663-aa53-6b9a325440c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.065221] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f3ce44e-34b4-44b5-b2c9-25e66b715594 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.077972] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ba1b5aa0-1764-4ca3-b771-36ad0d765a24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.088489] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4366b0b5-62ba-4681-9306-d2d9ff903c3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.098807] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 04b94227-4b90-4472-a367-39b8c5242558 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.107873] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.123356] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance a24ec07d-7e22-4f7e-9e69-9e5f60f505d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.133718] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.145177] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.145177] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 795.145177] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 795.396117] env[61356]: DEBUG oslo_vmware.exceptions [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 795.399541] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.400155] env[61356]: ERROR nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 795.400155] env[61356]: Faults: ['InvalidArgument'] [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Traceback (most recent call last): [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] yield resources [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self.driver.spawn(context, instance, image_meta, [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self._fetch_image_if_missing(context, vi) [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] image_cache(vi, tmp_image_ds_loc) [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] vm_util.copy_virtual_disk( [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] session._wait_for_task(vmdk_copy_task) [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] return self.wait_for_task(task_ref) [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] return evt.wait() [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] result = hub.switch() [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] return self.greenlet.switch() [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self.f(*self.args, **self.kw) [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] raise exceptions.translate_fault(task_info.error) [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Faults: ['InvalidArgument'] [ 795.400155] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] [ 795.401111] env[61356]: INFO nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Terminating instance [ 795.403309] env[61356]: DEBUG nova.compute.manager [None 
req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 795.403556] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 795.404988] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.404988] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.405654] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aef18f6-3f66-40eb-965e-d91d327a6735 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.409910] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-475dc233-ed14-4e85-a788-7cfae30936ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.416184] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 795.418571] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11d65d04-9994-4647-9695-4ed0024c6241 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.421431] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.421767] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 795.422889] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-879c8a6a-ffa6-4a2e-991f-36871fb9c279 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.428245] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Waiting for the task: (returnval){ [ 795.428245] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c6e4fe-b7a9-bee1-36f3-f689efd5af77" [ 795.428245] env[61356]: _type = "Task" [ 795.428245] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.439326] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c6e4fe-b7a9-bee1-36f3-f689efd5af77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.497795] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 795.498029] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 795.498215] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Deleting the datastore file [datastore2] 63f196ce-f9ad-4120-a96a-fd6824af893b {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.498472] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44088087-5098-4c23-9783-2dcdcbe8917e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.504204] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 795.504204] env[61356]: value = "task-4302520" [ 795.504204] env[61356]: _type = "Task" [ 795.504204] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.513676] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': task-4302520, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.581133] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588d4117-0985-461f-a25e-65140a42eff3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.588506] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a495b3a-9818-4ac7-a370-eba5c73b7ae2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.620496] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a63ebb8-7a1c-4c64-ac29-aa15921fc16e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.628511] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f8e988-4257-44a2-a2ef-1570ccba7042 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.642119] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.652482] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 795.676507] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 795.676507] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.828s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.676507] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.676507] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 795.689265] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] There are 0 instances to clean {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 
795.689489] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.689630] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances with incomplete migration {{(pid=61356) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 795.700551] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 795.938175] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 795.938451] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Creating directory with path [datastore2] vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.938681] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c7059ae-fcbb-4f14-9ab9-9e8622b73f7c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.950825] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Created directory with path [datastore2] vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.951042] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Fetch image to [datastore2] vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 795.951212] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 795.951986] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1593f8d-f30c-4ac8-94e5-f550fce50813 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.958685] 
env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b208c8-4f9e-4cc1-9b90-32c253255422 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.968892] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d57fbae-a6b1-43c4-a8c1-d2ab4184f762 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.001693] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c55789-9670-4033-926e-99436b7b16c3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.010174] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-89169d71-1ff3-42b7-a1b0-4d6671b6257a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.014270] env[61356]: DEBUG oslo_vmware.api [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': task-4302520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067932} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.014541] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.014739] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 796.014925] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 796.015098] env[61356]: INFO nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 796.017437] env[61356]: DEBUG nova.compute.claims [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 796.017603] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.017808] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.037272] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 796.104520] env[61356]: DEBUG oslo_vmware.rw_handles [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 796.167239] env[61356]: DEBUG oslo_vmware.rw_handles [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 796.167424] env[61356]: DEBUG oslo_vmware.rw_handles [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 796.522645] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f247f46-e4af-44a4-934f-0c4501222eac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.532044] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bab520-29e3-4821-901a-0fff5e3f6e75 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.564597] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080734f8-1532-4d81-a569-8483c992b500 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.575271] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ea2aa3-0a63-40f6-a0a1-0dc76ad40937 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.592797] env[61356]: DEBUG nova.compute.provider_tree [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.602806] env[61356]: DEBUG nova.scheduler.client.report [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 796.626401] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.607s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.626401] env[61356]: ERROR nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 796.626401] env[61356]: Faults: ['InvalidArgument'] [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Traceback (most recent call last): [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] 
self.driver.spawn(context, instance, image_meta, [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self._fetch_image_if_missing(context, vi) [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] image_cache(vi, tmp_image_ds_loc) [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] vm_util.copy_virtual_disk( [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] session._wait_for_task(vmdk_copy_task) [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] return self.wait_for_task(task_ref) [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] return evt.wait() [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] result = hub.switch() [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] return self.greenlet.switch() [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] self.f(*self.args, **self.kw) [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 796.626401] 
env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] raise exceptions.translate_fault(task_info.error) [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Faults: ['InvalidArgument'] [ 796.626401] env[61356]: ERROR nova.compute.manager [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] [ 796.626401] env[61356]: DEBUG nova.compute.utils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 796.628749] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Build of instance 63f196ce-f9ad-4120-a96a-fd6824af893b was re-scheduled: A specified parameter was not correct: fileType [ 796.628749] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 796.629204] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 796.629702] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 796.629920] env[61356]: DEBUG nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 796.630269] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.707525] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.758581] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.758581] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.186855] env[61356]: DEBUG nova.network.neutron [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.200040] env[61356]: INFO nova.compute.manager [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Took 0.57 seconds to deallocate network for instance. 
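The recurring "Acquiring lock" / "acquired" / "released ... held Ns" triplets throughout this log come from the `inner` wrapper that oslo.concurrency's lockutils installs around synchronized functions (the `lockutils.py:402/407/421` call sites cited in each entry). A minimal sketch of that pattern, assuming oslo.concurrency is installed; the function name and body here are placeholders, not Nova code:

```python
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_claims():
    # While this body runs, other callers of any function synchronized
    # on "compute_resources" block on the same named lock; the wrapper
    # logs how long each caller waited for the lock and how long the
    # lock was held once the call returns.
    pass
```

The waited/held durations in the entries are exactly what this wrapper measures, so a line like `held 242.407s` above means the decorated function ran for that long while holding the named lock.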
[ 797.318496] env[61356]: INFO nova.scheduler.client.report [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Deleted allocations for instance 63f196ce-f9ad-4120-a96a-fd6824af893b [ 797.360619] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9bc83a9c-b0ea-4cad-ba6d-45e08da3ada9 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 242.407s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.362332] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 41.016s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.362578] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "63f196ce-f9ad-4120-a96a-fd6824af893b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.362781] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.362947] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.365111] env[61356]: INFO nova.compute.manager [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Terminating instance [ 797.371851] env[61356]: DEBUG nova.compute.manager [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Start destroying the instance on the hypervisor.
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 797.373068] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 797.373540] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd65683a-c619-45a8-96c2-9416a98aea09 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.383725] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bea467e-f9d3-44d6-9810-e0960080ca27 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.398459] env[61356]: DEBUG nova.compute.manager [None req-50345d99-1db5-4187-837a-4c8c32d6c78f tempest-ImagesNegativeTestJSON-986467572 tempest-ImagesNegativeTestJSON-986467572-project-member] [instance: 0e648c1e-565d-44cd-a1f5-297b000e42cc] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.420368] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63f196ce-f9ad-4120-a96a-fd6824af893b could not be found. [ 797.420368] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 797.420368] env[61356]: INFO nova.compute.manager [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 797.420734] env[61356]: DEBUG oslo.service.loopingcall [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.424021] env[61356]: DEBUG nova.compute.manager [-] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 797.424021] env[61356]: DEBUG nova.network.neutron [-] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 797.431357] env[61356]: DEBUG nova.compute.manager [None req-50345d99-1db5-4187-837a-4c8c32d6c78f tempest-ImagesNegativeTestJSON-986467572 tempest-ImagesNegativeTestJSON-986467572-project-member] [instance: 0e648c1e-565d-44cd-a1f5-297b000e42cc] Instance disappeared before build.
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.451174] env[61356]: DEBUG nova.network.neutron [-] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.461481] env[61356]: DEBUG oslo_concurrency.lockutils [None req-50345d99-1db5-4187-837a-4c8c32d6c78f tempest-ImagesNegativeTestJSON-986467572 tempest-ImagesNegativeTestJSON-986467572-project-member] Lock "0e648c1e-565d-44cd-a1f5-297b000e42cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.153s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.463335] env[61356]: INFO nova.compute.manager [-] [instance: 63f196ce-f9ad-4120-a96a-fd6824af893b] Took 0.04 seconds to deallocate network for instance. [ 797.500402] env[61356]: DEBUG nova.compute.manager [None req-21018319-9820-4fa8-b8fd-699775ab684b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.541550] env[61356]: DEBUG nova.compute.manager [None req-21018319-9820-4fa8-b8fd-699775ab684b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.576988] env[61356]: DEBUG oslo_concurrency.lockutils [None req-21018319-9820-4fa8-b8fd-699775ab684b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "b8c9bec1-bf00-4e2f-b845-68bf3d75e9f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 204.215s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.597765] env[61356]: DEBUG nova.compute.manager [None req-f30c8a0e-d4a4-4880-bf3a-4fb2df4d6fbc tempest-FloatingIPsAssociationTestJSON-91698464 tempest-FloatingIPsAssociationTestJSON-91698464-project-member] [instance: 3bf420fa-ef3d-4d99-a0a8-379f59cafa1e] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.602162] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da6711c4-57f6-4bb0-8056-9c385666561b tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "63f196ce-f9ad-4120-a96a-fd6824af893b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.239s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.626829] env[61356]: DEBUG nova.compute.manager [None req-f30c8a0e-d4a4-4880-bf3a-4fb2df4d6fbc tempest-FloatingIPsAssociationTestJSON-91698464 tempest-FloatingIPsAssociationTestJSON-91698464-project-member] [instance: 3bf420fa-ef3d-4d99-a0a8-379f59cafa1e] Instance disappeared before build.
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.652601] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f30c8a0e-d4a4-4880-bf3a-4fb2df4d6fbc tempest-FloatingIPsAssociationTestJSON-91698464 tempest-FloatingIPsAssociationTestJSON-91698464-project-member] Lock "3bf420fa-ef3d-4d99-a0a8-379f59cafa1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 204.288s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.664158] env[61356]: DEBUG nova.compute.manager [None req-3092fe20-3fb8-4a38-aee4-d4cc618235fe tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] [instance: 9f78f9fb-8733-42fe-beb0-9329bf154de8] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.702547] env[61356]: DEBUG nova.compute.manager [None req-3092fe20-3fb8-4a38-aee4-d4cc618235fe tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] [instance: 9f78f9fb-8733-42fe-beb0-9329bf154de8] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.742773] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3092fe20-3fb8-4a38-aee4-d4cc618235fe tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] Lock "9f78f9fb-8733-42fe-beb0-9329bf154de8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.309s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.765856] env[61356]: DEBUG nova.compute.manager [None req-5e76801d-9395-4e86-904c-7d2a3d5630cc tempest-ServersWithSpecificFlavorTestJSON-450643974 tempest-ServersWithSpecificFlavorTestJSON-450643974-project-member] [instance: 669be123-adf8-4146-a79f-370347e642c9] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.800902] env[61356]: DEBUG nova.compute.manager [None req-5e76801d-9395-4e86-904c-7d2a3d5630cc tempest-ServersWithSpecificFlavorTestJSON-450643974 tempest-ServersWithSpecificFlavorTestJSON-450643974-project-member] [instance: 669be123-adf8-4146-a79f-370347e642c9] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 797.838346] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e76801d-9395-4e86-904c-7d2a3d5630cc tempest-ServersWithSpecificFlavorTestJSON-450643974 tempest-ServersWithSpecificFlavorTestJSON-450643974-project-member] Lock "669be123-adf8-4146-a79f-370347e642c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.312s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.854969] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Starting instance...
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 797.933791] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.933791] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.935204] env[61356]: INFO nova.compute.claims [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.419703] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3f966f-5fb3-4846-8cf0-f5de7854b1f4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.433156] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c14ed0-972e-4d57-a925-f9ea3758d73d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.469738] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cff2bcf-f042-426c-b214-ded61371386d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.477714] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18da48ac-30d2-42df-9416-3d302ea553e5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.491122] env[61356]: DEBUG nova.compute.provider_tree [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.501046] env[61356]: DEBUG nova.scheduler.client.report [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.521954] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.588s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.522485] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 798.572285] env[61356]: DEBUG nova.compute.utils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.573654] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Not allocating networking since 'none' was specified. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 798.584703] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 798.662186] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 798.703402] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 798.704216] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 798.704512] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.704815] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 798.705133] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.705779] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 798.709020] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 798.709020] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 798.709020] env[61356]: DEBUG nova.virt.hardware [None 
req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 798.709020] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 798.709020] env[61356]: DEBUG nova.virt.hardware [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 798.709020] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e7cf99-dbfa-45c2-8185-1c229386595a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.717218] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d69220-6ed1-43c1-b3cc-fbff488e3790 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.734978] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance VIF info [] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.741914] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Creating folder: Project (8c4902faf11e434d91216250093d8660). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.741914] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a122bc5a-ff1e-451f-bc20-7d8eb8f1ce4c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.753135] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Created folder: Project (8c4902faf11e434d91216250093d8660) in parent group-v846926. [ 798.753135] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Creating folder: Instances. Parent ref: group-v846977. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 798.753335] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bddf344e-d0d7-41af-b451-f000e54a73cb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.759949] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.760105] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.766791] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Created folder: Instances in parent group-v846977. [ 798.766791] env[61356]: DEBUG oslo.service.loopingcall [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 798.767208] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 798.767517] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-672b4545-c603-4d88-b623-1abe6a6bab17 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.788788] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.788788] env[61356]: value = "task-4302523" [ 798.788788] env[61356]: _type = "Task" [ 798.788788] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.796972] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302523, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.302071] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302523, 'name': CreateVM_Task, 'duration_secs': 0.282089} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.302289] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 799.302887] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.302999] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.303350] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 799.303633] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afe49314-c4f6-42c1-b67e-a6b7a28ff2c8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.308677] env[61356]: DEBUG oslo_vmware.api [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Waiting for the task: (returnval){ [ 799.308677] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f68b3a-a6c2-ed48-10f9-4ae871683fda" [ 799.308677] env[61356]: _type = "Task" [ 799.308677] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.316680] env[61356]: DEBUG oslo_vmware.api [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f68b3a-a6c2-ed48-10f9-4ae871683fda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.758640] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.758925] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 799.758925] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 799.787567] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.787676] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.787843] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.787931] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.788618] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.789033] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.789105] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.789237] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.793031] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.793031] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 799.793031] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 799.793530] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.793752] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 799.822143] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.822143] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.822299] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.844974] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "1353d492-e9f0-4495-83e1-6832bb03d5db" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.845283] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" ::
waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.063683] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8dba2bfb-0a72-4f20-9a77-6531b60e2034 tempest-FloatingIPsAssociationNegativeTestJSON-792199732 tempest-FloatingIPsAssociationNegativeTestJSON-792199732-project-member] Acquiring lock "7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.064088] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8dba2bfb-0a72-4f20-9a77-6531b60e2034 tempest-FloatingIPsAssociationNegativeTestJSON-792199732 tempest-FloatingIPsAssociationNegativeTestJSON-792199732-project-member] Lock "7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.788390] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.850964] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da479aa7-0997-4ce9-a751-472048829ba9 tempest-ServersTestManualDisk-1291211396 tempest-ServersTestManualDisk-1291211396-project-member] Acquiring lock "42716a31-0ced-493f-b7f7-5576ef2babd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.851226] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da479aa7-0997-4ce9-a751-472048829ba9 tempest-ServersTestManualDisk-1291211396 tempest-ServersTestManualDisk-1291211396-project-member] Lock "42716a31-0ced-493f-b7f7-5576ef2babd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.549662] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "1df51a6d-1303-4c89-b61e-95070acf0772" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.452621] env[61356]: DEBUG oslo_concurrency.lockutils [None req-475f2f4d-6434-45d2-8b3e-e7ce89e155fb tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] Acquiring lock "79969f15-053a-4567-b705-fbfca33b6ee8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.452952] env[61356]: DEBUG oslo_concurrency.lockutils [None req-475f2f4d-6434-45d2-8b3e-e7ce89e155fb tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] Lock "79969f15-053a-4567-b705-fbfca33b6ee8" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.394623] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e275a1b1-f4aa-4f07-8ec2-ab6f084ac02e tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Acquiring lock "b5cebdbc-f815-4b93-ac2a-27f0121966a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.394949] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e275a1b1-f4aa-4f07-8ec2-ab6f084ac02e tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Lock "b5cebdbc-f815-4b93-ac2a-27f0121966a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.169547] env[61356]: DEBUG oslo_concurrency.lockutils [None req-71b143b2-2700-418e-bf81-def98c010a19 tempest-ServerMetadataNegativeTestJSON-1051451279 tempest-ServerMetadataNegativeTestJSON-1051451279-project-member] Acquiring lock "4053d28b-837f-4e5c-bdbe-10f10cb3ce04" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.169831] env[61356]: DEBUG oslo_concurrency.lockutils [None req-71b143b2-2700-418e-bf81-def98c010a19 tempest-ServerMetadataNegativeTestJSON-1051451279 tempest-ServerMetadataNegativeTestJSON-1051451279-project-member] Lock "4053d28b-837f-4e5c-bdbe-10f10cb3ce04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.584307] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d7e349c-fb99-4739-b720-d82cc883c6e1 tempest-ServersNegativeTestMultiTenantJSON-1341881425 tempest-ServersNegativeTestMultiTenantJSON-1341881425-project-member] Acquiring lock "4f8c5de0-e819-4702-b871-619e221eeb4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.584613] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d7e349c-fb99-4739-b720-d82cc883c6e1 tempest-ServersNegativeTestMultiTenantJSON-1341881425 tempest-ServersNegativeTestMultiTenantJSON-1341881425-project-member] Lock "4f8c5de0-e819-4702-b871-619e221eeb4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.235338] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c271f1af-2273-4b4b-88b6-f86c13eaf1d6 tempest-ServersV294TestFqdnHostnames-1674490250 tempest-ServersV294TestFqdnHostnames-1674490250-project-member] Acquiring lock "612128fe-2d34-46e7-9496-f6c85dcf4ca4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.235569] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c271f1af-2273-4b4b-88b6-f86c13eaf1d6 tempest-ServersV294TestFqdnHostnames-1674490250 tempest-ServersV294TestFqdnHostnames-1674490250-project-member] Lock "612128fe-2d34-46e7-9496-f6c85dcf4ca4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.220268] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Acquiring lock "acc5ec07-0d2f-413c-ab18-07876f43d786" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.221230] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "acc5ec07-0d2f-413c-ab18-07876f43d786" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.246699] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Acquiring lock "a76f796d-63d2-41ad-b800-629b68c00694" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.246699] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "a76f796d-63d2-41ad-b800-629b68c00694" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.179634] env[61356]: DEBUG oslo_concurrency.lockutils [None req-367571d2-573e-4b22-942c-29bd9fc18980 tempest-InstanceActionsTestJSON-1335029912 tempest-InstanceActionsTestJSON-1335029912-project-member] Acquiring lock "7babcc84-5bde-464c-9ac8-c832cb3887f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.179963] env[61356]: DEBUG oslo_concurrency.lockutils [None req-367571d2-573e-4b22-942c-29bd9fc18980 tempest-InstanceActionsTestJSON-1335029912 tempest-InstanceActionsTestJSON-1335029912-project-member] Lock "7babcc84-5bde-464c-9ac8-c832cb3887f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.322017] env[61356]: WARNING oslo_vmware.rw_handles [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734
tempest-ServersTestJSON-1672500734-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 843.322017] env[61356]: ERROR oslo_vmware.rw_handles [ 843.322724] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 843.324281] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 843.324570] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Copying Virtual Disk [datastore2] vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/54cafb01-506f-48df-9719-d6f68af0bfcf/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 843.324875] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70c91a9f-1dd8-4ddf-8556-1edab52d67c4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.332536] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Waiting for the task: (returnval){ [ 843.332536] env[61356]: value = "task-4302524" [ 843.332536] env[61356]: _type = "Task" [ 843.332536] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.339985] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Task: {'id': task-4302524, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.844283] env[61356]: DEBUG oslo_vmware.exceptions [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 843.844607] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.845170] env[61356]: ERROR nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 843.845170] env[61356]: Faults: ['InvalidArgument'] [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Traceback (most recent call last): [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] yield resources [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self.driver.spawn(context, instance, image_meta, [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self._fetch_image_if_missing(context, vi) [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] image_cache(vi, tmp_image_ds_loc) [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
537, in _cache_sparse_image [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] vm_util.copy_virtual_disk( [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] session._wait_for_task(vmdk_copy_task) [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] return self.wait_for_task(task_ref) [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] return evt.wait() [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] result = hub.switch() [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] return self.greenlet.switch() [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self.f(*self.args, **self.kw) [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] raise exceptions.translate_fault(task_info.error) [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Faults: ['InvalidArgument'] [ 843.845170] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] [ 843.846232] env[61356]: INFO nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Terminating instance [ 843.847063] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.847271] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.847875] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 843.848078] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.848306] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87ad5d47-03b8-49ef-b465-075b3c62a524 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.850530] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdb0564-5584-4479-8f85-5ee3fa8f7a2a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.857716] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 843.858638] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc37008f-6830-459d-a24f-c03c0993b857 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.859975] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.860162] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 843.860801] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01b32de7-0e80-4d73-ab64-1e260cc02b8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.866298] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){ [ 843.866298] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]528cdcf0-dc66-1693-d90a-bf7e9ee5dda3" [ 843.866298] env[61356]: _type = "Task" [ 843.866298] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.873686] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]528cdcf0-dc66-1693-d90a-bf7e9ee5dda3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.932140] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.932365] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.932579] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Deleting the datastore file [datastore2] 9ca64659-93b6-4585-a7dc-f9152184144e {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.932847] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75ffdd81-c595-4487-9982-8ebd06c0d1dd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.939116] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Waiting for the task: (returnval){ [ 843.939116] env[61356]: value = "task-4302526" [ 843.939116] env[61356]: _type = "Task" [ 843.939116] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.946897] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Task: {'id': task-4302526, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.377598] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 844.377919] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating directory with path [datastore2] vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.378095] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ef6fe21-8350-4320-a6a1-6c0991630f6c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.390008] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Created directory with path [datastore2] vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.390008] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Fetch image to [datastore2] vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 844.390008] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 844.390676] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729dfbbe-d621-4206-b9a0-cdc92217fa1f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.398203] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba17776-fe15-4d0b-93b8-65ca75dd4ee9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.407937] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b16effa-b540-499a-ac69-9cb1f6bf8f6b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.438368] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66ede45-8aff-4cbf-b3ea-b91d9e6e6197 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.448453] env[61356]: DEBUG oslo_vmware.api [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Task: {'id': task-4302526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064061} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.449915] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.450122] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 844.450302] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 844.450479] env[61356]: INFO nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Took 0.60 seconds to destroy the instance on the hypervisor. [ 844.452653] env[61356]: DEBUG nova.compute.claims [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 844.452860] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.453044] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.457749] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b30b10f9-c421-4288-bba4-1a50ac2d0ee0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.482066] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 844.589629] env[61356]: DEBUG nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Refreshing inventories for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 844.609120] env[61356]: DEBUG nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Updating ProviderTree inventory for provider cfb833eb-84f8-480d-866a-1ad2fb871484 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 844.609349] env[61356]: DEBUG nova.compute.provider_tree [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 844.620029] env[61356]: DEBUG oslo_vmware.rw_handles [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 844.622869] env[61356]: DEBUG nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Refreshing aggregate associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, aggregates: None {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 844.680611] env[61356]: DEBUG nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Refreshing trait associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 844.685204] env[61356]: DEBUG oslo_vmware.rw_handles [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 844.685434] env[61356]: DEBUG oslo_vmware.rw_handles [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 845.000627] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b8cd5e-7e13-4776-bf5b-7f7ed83aa653 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.008455] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fd26d9-dea6-41c4-a0bd-2160d3067b5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.040156] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37f1005-84c7-41ee-b649-257a8e575e68 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.047113] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce649b69-9be3-46e1-a1a5-06053cd1be9e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.059855] env[61356]: DEBUG nova.compute.provider_tree [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.071584] env[61356]: DEBUG nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 845.090173] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.637s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.091034] env[61356]: ERROR nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 845.091034] env[61356]: Faults: ['InvalidArgument'] [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Traceback (most recent call last): [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 
9ca64659-93b6-4585-a7dc-f9152184144e] self.driver.spawn(context, instance, image_meta, [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self._fetch_image_if_missing(context, vi) [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] image_cache(vi, tmp_image_ds_loc) [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] vm_util.copy_virtual_disk( [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] session._wait_for_task(vmdk_copy_task) [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] return self.wait_for_task(task_ref) [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] return evt.wait() [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] result = hub.switch() [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] return self.greenlet.switch() [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] self.f(*self.args, **self.kw) [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] raise exceptions.translate_fault(task_info.error) [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Faults: ['InvalidArgument'] [ 845.091034] env[61356]: ERROR nova.compute.manager [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] [ 845.092025] env[61356]: DEBUG nova.compute.utils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 845.093685] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Build of instance 9ca64659-93b6-4585-a7dc-f9152184144e was re-scheduled: A specified parameter was not correct: fileType [ 845.093685] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 845.093685] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 845.093685] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 845.093848] env[61356]: DEBUG nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 845.094019] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 845.423826] env[61356]: DEBUG nova.network.neutron [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.435639] env[61356]: INFO nova.compute.manager [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Took 0.34 seconds to deallocate network for instance. 
[ 845.542374] env[61356]: INFO nova.scheduler.client.report [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Deleted allocations for instance 9ca64659-93b6-4585-a7dc-f9152184144e
[ 845.575112] env[61356]: DEBUG oslo_concurrency.lockutils [None req-45636c31-ebd9-48ea-a85a-100fcf820455 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "9ca64659-93b6-4585-a7dc-f9152184144e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 285.402s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.576557] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "9ca64659-93b6-4585-a7dc-f9152184144e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 84.378s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 845.576557] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Acquiring lock "9ca64659-93b6-4585-a7dc-f9152184144e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 845.576890] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "9ca64659-93b6-4585-a7dc-f9152184144e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 845.576890] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "9ca64659-93b6-4585-a7dc-f9152184144e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.580693] env[61356]: INFO nova.compute.manager [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Terminating instance
[ 845.582893] env[61356]: DEBUG nova.compute.manager [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 845.583105] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 845.583374] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8be6f335-9534-476a-8017-a03a1d0fc6a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.590036] env[61356]: DEBUG nova.compute.manager [None req-c738df3a-cf02-439a-a3e6-307810aa6729 tempest-ServersTestBootFromVolume-1579151348 tempest-ServersTestBootFromVolume-1579151348-project-member] [instance: ea6b8561-5072-4576-991c-034f106af2df] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 845.602682] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3dd1d1-25c8-43a7-854d-1b6fa9c2d783 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 845.633377] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9ca64659-93b6-4585-a7dc-f9152184144e could not be found.
[ 845.633588] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 845.633762] env[61356]: INFO nova.compute.manager [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 845.634010] env[61356]: DEBUG oslo.service.loopingcall [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 845.634453] env[61356]: DEBUG nova.compute.manager [None req-c738df3a-cf02-439a-a3e6-307810aa6729 tempest-ServersTestBootFromVolume-1579151348 tempest-ServersTestBootFromVolume-1579151348-project-member] [instance: ea6b8561-5072-4576-991c-034f106af2df] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 845.635337] env[61356]: DEBUG nova.compute.manager [-] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 845.635436] env[61356]: DEBUG nova.network.neutron [-] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 845.657951] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c738df3a-cf02-439a-a3e6-307810aa6729 tempest-ServersTestBootFromVolume-1579151348 tempest-ServersTestBootFromVolume-1579151348-project-member] Lock "ea6b8561-5072-4576-991c-034f106af2df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.028s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.661029] env[61356]: DEBUG nova.network.neutron [-] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 845.668313] env[61356]: DEBUG nova.compute.manager [None req-13867a57-af74-4012-9a9f-6d39f4705523 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] [instance: dd4a25f1-b8f0-43a1-8187-56eaddfdd08c] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 845.672625] env[61356]: INFO nova.compute.manager [-] [instance: 9ca64659-93b6-4585-a7dc-f9152184144e] Took 0.04 seconds to deallocate network for instance.
[ 845.693445] env[61356]: DEBUG nova.compute.manager [None req-13867a57-af74-4012-9a9f-6d39f4705523 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] [instance: dd4a25f1-b8f0-43a1-8187-56eaddfdd08c] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 845.714037] env[61356]: DEBUG oslo_concurrency.lockutils [None req-13867a57-af74-4012-9a9f-6d39f4705523 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Lock "dd4a25f1-b8f0-43a1-8187-56eaddfdd08c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.073s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.723450] env[61356]: DEBUG nova.compute.manager [None req-4d38b50f-6fe0-4b39-b05f-a4e6bafe4852 tempest-AttachInterfacesUnderV243Test-345580002 tempest-AttachInterfacesUnderV243Test-345580002-project-member] [instance: bdfedbb4-25a1-4941-afca-f325e489f9e3] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 845.750341] env[61356]: DEBUG nova.compute.manager [None req-4d38b50f-6fe0-4b39-b05f-a4e6bafe4852 tempest-AttachInterfacesUnderV243Test-345580002 tempest-AttachInterfacesUnderV243Test-345580002-project-member] [instance: bdfedbb4-25a1-4941-afca-f325e489f9e3] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 845.784867] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4d38b50f-6fe0-4b39-b05f-a4e6bafe4852 tempest-AttachInterfacesUnderV243Test-345580002 tempest-AttachInterfacesUnderV243Test-345580002-project-member] Lock "bdfedbb4-25a1-4941-afca-f325e489f9e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.982s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.799886] env[61356]: DEBUG nova.compute.manager [None req-4fc8e0b7-dbc8-4b8e-a524-4d6c4419656b tempest-AttachInterfacesV270Test-953314663 tempest-AttachInterfacesV270Test-953314663-project-member] [instance: b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 845.808740] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e138aaaa-abd6-4003-9033-44412bbde6e4 tempest-ServersTestJSON-1672500734 tempest-ServersTestJSON-1672500734-project-member] Lock "9ca64659-93b6-4585-a7dc-f9152184144e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.232s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.827290] env[61356]: DEBUG nova.compute.manager [None req-4fc8e0b7-dbc8-4b8e-a524-4d6c4419656b tempest-AttachInterfacesV270Test-953314663 tempest-AttachInterfacesV270Test-953314663-project-member] [instance: b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 845.856771] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4fc8e0b7-dbc8-4b8e-a524-4d6c4419656b tempest-AttachInterfacesV270Test-953314663 tempest-AttachInterfacesV270Test-953314663-project-member] Lock "b5e2c0eb-43ff-4e4d-ad2c-b1ce10b9a91e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.857s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.869788] env[61356]: DEBUG nova.compute.manager [None req-ea907ebe-69d7-4ec8-a17a-d43651086f7d tempest-ServerAddressesNegativeTestJSON-420863502 tempest-ServerAddressesNegativeTestJSON-420863502-project-member] [instance: 59b0bc79-1c06-441a-a114-5727e3dec5ef] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 845.897559] env[61356]: DEBUG nova.compute.manager [None req-ea907ebe-69d7-4ec8-a17a-d43651086f7d tempest-ServerAddressesNegativeTestJSON-420863502 tempest-ServerAddressesNegativeTestJSON-420863502-project-member] [instance: 59b0bc79-1c06-441a-a114-5727e3dec5ef] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 845.924023] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ea907ebe-69d7-4ec8-a17a-d43651086f7d tempest-ServerAddressesNegativeTestJSON-420863502 tempest-ServerAddressesNegativeTestJSON-420863502-project-member] Lock "59b0bc79-1c06-441a-a114-5727e3dec5ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.889s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.934030] env[61356]: DEBUG nova.compute.manager [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: 35a37ff4-c3a4-404c-a6fa-934d5d2b3846] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 845.957877] env[61356]: DEBUG nova.compute.manager [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: 35a37ff4-c3a4-404c-a6fa-934d5d2b3846] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 845.980826] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "35a37ff4-c3a4-404c-a6fa-934d5d2b3846" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.123s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 845.989956] env[61356]: DEBUG nova.compute.manager [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: d6df5c9d-0ead-4663-aa53-6b9a325440c4] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 846.012093] env[61356]: DEBUG nova.compute.manager [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: d6df5c9d-0ead-4663-aa53-6b9a325440c4] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 846.032675] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d3f17a43-c8e7-4d03-ad71-7a5ca3596f22 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "d6df5c9d-0ead-4663-aa53-6b9a325440c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.131s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 846.041298] env[61356]: DEBUG nova.compute.manager [None req-3a46dcd9-2481-42f9-a986-5b9647c4aabf tempest-ServerRescueTestJSON-404971253 tempest-ServerRescueTestJSON-404971253-project-member] [instance: 9f3ce44e-34b4-44b5-b2c9-25e66b715594] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 846.064644] env[61356]: DEBUG nova.compute.manager [None req-3a46dcd9-2481-42f9-a986-5b9647c4aabf tempest-ServerRescueTestJSON-404971253 tempest-ServerRescueTestJSON-404971253-project-member] [instance: 9f3ce44e-34b4-44b5-b2c9-25e66b715594] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 846.086794] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3a46dcd9-2481-42f9-a986-5b9647c4aabf tempest-ServerRescueTestJSON-404971253 tempest-ServerRescueTestJSON-404971253-project-member] Lock "9f3ce44e-34b4-44b5-b2c9-25e66b715594" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.349s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 846.100440] env[61356]: DEBUG nova.compute.manager [None req-6d4d4c46-3961-48f8-93c8-471f552dabc9 tempest-ImagesOneServerTestJSON-1535343616 tempest-ImagesOneServerTestJSON-1535343616-project-member] [instance: ba1b5aa0-1764-4ca3-b771-36ad0d765a24] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 846.124019] env[61356]: DEBUG nova.compute.manager [None req-6d4d4c46-3961-48f8-93c8-471f552dabc9 tempest-ImagesOneServerTestJSON-1535343616 tempest-ImagesOneServerTestJSON-1535343616-project-member] [instance: ba1b5aa0-1764-4ca3-b771-36ad0d765a24] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 846.145412] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6d4d4c46-3961-48f8-93c8-471f552dabc9 tempest-ImagesOneServerTestJSON-1535343616 tempest-ImagesOneServerTestJSON-1535343616-project-member] Lock "ba1b5aa0-1764-4ca3-b771-36ad0d765a24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.629s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 846.156080] env[61356]: DEBUG nova.compute.manager [None req-585d6040-4e15-4c37-aaab-5a075414154e tempest-ServerDiagnosticsV248Test-1598086578 tempest-ServerDiagnosticsV248Test-1598086578-project-member] [instance: 4366b0b5-62ba-4681-9306-d2d9ff903c3a] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 846.180072] env[61356]: DEBUG nova.compute.manager [None req-585d6040-4e15-4c37-aaab-5a075414154e tempest-ServerDiagnosticsV248Test-1598086578 tempest-ServerDiagnosticsV248Test-1598086578-project-member] [instance: 4366b0b5-62ba-4681-9306-d2d9ff903c3a] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 846.201877] env[61356]: DEBUG oslo_concurrency.lockutils [None req-585d6040-4e15-4c37-aaab-5a075414154e tempest-ServerDiagnosticsV248Test-1598086578 tempest-ServerDiagnosticsV248Test-1598086578-project-member] Lock "4366b0b5-62ba-4681-9306-d2d9ff903c3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.138s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 846.212958] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 846.279946] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 846.280221] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 846.281852] env[61356]: INFO nova.compute.claims [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 846.668975] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548fa188-39f7-475d-b790-e21755ec4b5c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.677864] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3573a4-3df5-4917-8cd9-aabab2cf6e82 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.712978] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee66037-dbd6-4097-9619-e082d725f133 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.720367] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d222dcbc-e7d4-4b68-a200-bf3d3ee74738 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 846.732912] env[61356]: DEBUG nova.compute.provider_tree [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.746530] env[61356]: DEBUG nova.scheduler.client.report [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.762815] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.482s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.762815] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 846.809217] env[61356]: DEBUG nova.compute.utils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 846.810811] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 846.811102] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 846.820319] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Start building block device mappings for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 846.886230] env[61356]: INFO nova.virt.block_device [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Booting with volume fae2bb99-d83f-4362-a10f-82a2bcf68133 at /dev/sda [ 846.891603] env[61356]: DEBUG nova.policy [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85e50c56d4754273b81dd39380750211', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02eda224b82b4f47a4c15497d3651bea', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 846.935180] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05a86955-79a8-47fd-b64d-b64aec6b88d2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.946438] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408f9d75-da4b-48ed-878b-1005b4e09bc0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.976040] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-753b2347-6bd4-43cc-bda5-e787b5746adb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.983804] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d396027b-108c-41aa-810a-d4f81c41c4aa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.017608] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3b3ed6-7792-418b-bb60-5441e15f1ee2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.025071] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d346bc14-e618-4021-b723-87176fc4cac3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.039634] env[61356]: DEBUG nova.virt.block_device [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updating existing volume attachment record: c215eb6f-bec1-4fec-a2cd-0ca5e4b08078 {{(pid=61356) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 847.287291] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Successfully created port: f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 847.339518] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 847.340157] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.340380] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.340537] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.340717] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.340861] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.341018] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.341231] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.341399] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 
tempest-ServerActionsV293TestJSON-1779699484-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.341565] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.341728] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.341897] env[61356]: DEBUG nova.virt.hardware [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.343401] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0fe624-a9b1-45b2-981a-075ced806a04 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.353264] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6874ca5d-7ce5-4c21-bfef-8b72c0d34c71 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.963231] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Successfully updated port: f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.975019] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.975019] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquired lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.975019] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.022081] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 
04b94227-4b90-4472-a367-39b8c5242558] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.211923] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "04b94227-4b90-4472-a367-39b8c5242558" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.234133] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updating instance_info_cache with network_info: [{"id": "f3162b4a-5155-47f7-baf8-391b7928336b", "address": "fa:16:3e:aa:9d:af", "network": {"id": "4f419995-99b9-46be-97b6-d1634912c381", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1385842297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02eda224b82b4f47a4c15497d3651bea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3162b4a-51", "ovs_interfaceid": "f3162b4a-5155-47f7-baf8-391b7928336b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.247625] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Releasing lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.247934] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance network_info: |[{"id": "f3162b4a-5155-47f7-baf8-391b7928336b", "address": "fa:16:3e:aa:9d:af", "network": {"id": "4f419995-99b9-46be-97b6-d1634912c381", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1385842297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02eda224b82b4f47a4c15497d3651bea", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3162b4a-51", "ovs_interfaceid": "f3162b4a-5155-47f7-baf8-391b7928336b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 848.248385] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:9d:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'edd47158-6f4b-44a1-8e82-0411205ad299', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3162b4a-5155-47f7-baf8-391b7928336b', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.256046] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Creating folder: Project (02eda224b82b4f47a4c15497d3651bea). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 848.256626] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26cf9db0-58b9-4204-80cd-a734e6148c99 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.269880] env[61356]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 848.270066] env[61356]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61356) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 848.270413] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Folder already exists: Project (02eda224b82b4f47a4c15497d3651bea). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 848.270608] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Creating folder: Instances. Parent ref: group-v846964. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 848.270845] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45a914ce-54f3-40d7-b52a-c8e4811838bb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.280016] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Created folder: Instances in parent group-v846964. 
[ 848.280283] env[61356]: DEBUG oslo.service.loopingcall [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.280479] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 848.280681] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-381aa95b-178d-48ff-8b6c-742eff008ab0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.301820] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.301820] env[61356]: value = "task-4302529" [ 848.301820] env[61356]: _type = "Task" [ 848.301820] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.311893] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302529, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.320277] env[61356]: DEBUG nova.compute.manager [req-59c78c07-2e8a-42ef-996d-de4ce855d827 req-d625372d-ffe5-494d-a500-993d84f23125 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Received event network-vif-plugged-f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 848.320545] env[61356]: DEBUG oslo_concurrency.lockutils [req-59c78c07-2e8a-42ef-996d-de4ce855d827 req-d625372d-ffe5-494d-a500-993d84f23125 service nova] Acquiring lock "04b94227-4b90-4472-a367-39b8c5242558-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.320763] env[61356]: DEBUG oslo_concurrency.lockutils [req-59c78c07-2e8a-42ef-996d-de4ce855d827 req-d625372d-ffe5-494d-a500-993d84f23125 service nova] Lock "04b94227-4b90-4472-a367-39b8c5242558-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.320928] env[61356]: DEBUG oslo_concurrency.lockutils [req-59c78c07-2e8a-42ef-996d-de4ce855d827 req-d625372d-ffe5-494d-a500-993d84f23125 service nova] Lock "04b94227-4b90-4472-a367-39b8c5242558-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.321517] env[61356]: DEBUG nova.compute.manager [req-59c78c07-2e8a-42ef-996d-de4ce855d827 req-d625372d-ffe5-494d-a500-993d84f23125 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] No waiting events found dispatching network-vif-plugged-f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 848.321960] env[61356]: WARNING nova.compute.manager [req-59c78c07-2e8a-42ef-996d-de4ce855d827 req-d625372d-ffe5-494d-a500-993d84f23125 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Received 
unexpected event network-vif-plugged-f3162b4a-5155-47f7-baf8-391b7928336b for instance with vm_state building and task_state deleting. [ 848.812318] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302529, 'name': CreateVM_Task, 'duration_secs': 0.30567} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.812541] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 848.813186] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'c215eb6f-bec1-4fec-a2cd-0ca5e4b08078', 'delete_on_termination': True, 'disk_bus': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-846967', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'name': 'volume-fae2bb99-d83f-4362-a10f-82a2bcf68133', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '04b94227-4b90-4472-a367-39b8c5242558', 'attached_at': '', 'detached_at': '', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'serial': 'fae2bb99-d83f-4362-a10f-82a2bcf68133'}, 'device_type': None, 'boot_index': 0, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=61356) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 848.813412] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Root volume attach. 
Driver type: vmdk {{(pid=61356) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 848.814223] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce1a94a-57c6-4fb7-99d9-9f0a7f284c32 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.827208] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2206426-6c73-4824-bc06-8514cd9a7483 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.835429] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fa6142-4902-46e9-8152-180f7ee3df70 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.843447] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-0cc30543-ab64-45bc-9615-10a053f26c06 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.853330] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 848.853330] env[61356]: value = "task-4302530" [ 848.853330] env[61356]: _type = "Task" [ 848.853330] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.863980] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302530, 'name': RelocateVM_Task} progress is 5%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.363565] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302530, 'name': RelocateVM_Task, 'duration_secs': 0.025601} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.363927] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Volume attach. 
Driver type: vmdk {{(pid=61356) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 849.364193] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-846967', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'name': 'volume-fae2bb99-d83f-4362-a10f-82a2bcf68133', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '04b94227-4b90-4472-a367-39b8c5242558', 'attached_at': '', 'detached_at': '', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'serial': 'fae2bb99-d83f-4362-a10f-82a2bcf68133'} {{(pid=61356) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 849.365222] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f02acbf-11c4-45da-87fe-5b490a431846 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.386938] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48411771-a93f-4ec2-967f-c837825c9a1b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.417910] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] volume-fae2bb99-d83f-4362-a10f-82a2bcf68133/volume-fae2bb99-d83f-4362-a10f-82a2bcf68133.vmdk or device None with type thin {{(pid=61356) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.418756] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53cf2402-2c0a-472a-a41f-4e2ca1baae5c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.438424] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 849.438424] env[61356]: value = "task-4302531" [ 849.438424] env[61356]: _type = "Task" [ 849.438424] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.447338] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302531, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.949055] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302531, 'name': ReconfigVM_Task, 'duration_secs': 0.270207} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.949332] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Reconfigured VM instance instance-0000001f to attach disk [datastore2] volume-fae2bb99-d83f-4362-a10f-82a2bcf68133/volume-fae2bb99-d83f-4362-a10f-82a2bcf68133.vmdk or device None with type thin {{(pid=61356) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.954031] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a391b78-7c55-47a8-950c-fa6997b5549f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.968565] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 849.968565] env[61356]: value = "task-4302532" [ 849.968565] env[61356]: _type = "Task" [ 849.968565] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.976297] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302532, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.347614] env[61356]: DEBUG nova.compute.manager [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Received event network-changed-f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 850.348038] env[61356]: DEBUG nova.compute.manager [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Refreshing instance network info cache due to event network-changed-f3162b4a-5155-47f7-baf8-391b7928336b. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 850.349147] env[61356]: DEBUG oslo_concurrency.lockutils [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] Acquiring lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.349147] env[61356]: DEBUG oslo_concurrency.lockutils [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] Acquired lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.349147] env[61356]: DEBUG nova.network.neutron [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Refreshing network info cache for port f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 850.482201] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302532, 'name': ReconfigVM_Task, 'duration_secs': 0.145543} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.482629] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-846967', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'name': 'volume-fae2bb99-d83f-4362-a10f-82a2bcf68133', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '04b94227-4b90-4472-a367-39b8c5242558', 'attached_at': '', 'detached_at': '', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'serial': 'fae2bb99-d83f-4362-a10f-82a2bcf68133'} {{(pid=61356) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 850.483257] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4cdabf1f-b16e-4600-8610-d9f0e99c9b52 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.490636] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 850.490636] env[61356]: value = "task-4302533" [ 850.490636] env[61356]: _type = "Task" [ 850.490636] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.499069] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302533, 'name': Rename_Task} progress is 5%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.000497] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302533, 'name': Rename_Task, 'duration_secs': 0.12186} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.000779] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Powering on the VM {{(pid=61356) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 851.001044] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cfdc9b8-06cd-490c-a997-107359da5cde {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.007877] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 851.007877] env[61356]: value = "task-4302534" [ 851.007877] env[61356]: _type = "Task" [ 851.007877] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.014736] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.032900] env[61356]: DEBUG nova.network.neutron [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updated VIF entry in instance network info cache for port f3162b4a-5155-47f7-baf8-391b7928336b. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 851.033343] env[61356]: DEBUG nova.network.neutron [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updating instance_info_cache with network_info: [{"id": "f3162b4a-5155-47f7-baf8-391b7928336b", "address": "fa:16:3e:aa:9d:af", "network": {"id": "4f419995-99b9-46be-97b6-d1634912c381", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1385842297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02eda224b82b4f47a4c15497d3651bea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3162b4a-51", "ovs_interfaceid": "f3162b4a-5155-47f7-baf8-391b7928336b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.047740] env[61356]: DEBUG oslo_concurrency.lockutils [req-6e97cc8f-b75c-44a4-8ee0-1d2de9384daf req-fc67f49c-00a4-42c4-93ee-a8b68664e869 service nova] Releasing lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.519131] env[61356]: DEBUG oslo_vmware.api [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302534, 'name': PowerOnVM_Task, 'duration_secs': 0.486539} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.519358] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Powered on the VM {{(pid=61356) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 851.519477] env[61356]: INFO nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Took 4.18 seconds to spawn the instance on the hypervisor. 
[ 851.519667] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Checking state {{(pid=61356) _get_power_state /opt/stack/nova/nova/compute/manager.py:1783}} [ 851.520605] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9489f119-34e8-4612-92f7-1ec67012b4e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.569659] env[61356]: DEBUG nova.compute.utils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Conflict updating instance 04b94227-4b90-4472-a367-39b8c5242558. Expected: {'task_state': ['spawning']}. Actual: {'task_state': 'deleting'} {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 851.571223] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance disappeared during build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 851.571499] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 851.571603] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 851.571951] env[61356]: DEBUG nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 851.572048] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 852.283320] env[61356]: DEBUG nova.network.neutron [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.298249] env[61356]: INFO nova.compute.manager [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Took 0.73 seconds to deallocate network for instance. [ 852.410427] env[61356]: INFO nova.scheduler.client.report [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Deleted allocations for instance 04b94227-4b90-4472-a367-39b8c5242558 [ 852.414108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-06baf4b9-b974-4404-9273-371a164906f4 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "04b94227-4b90-4472-a367-39b8c5242558" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.428s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.414108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "04b94227-4b90-4472-a367-39b8c5242558" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.200s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.414108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "04b94227-4b90-4472-a367-39b8c5242558-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.414108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "04b94227-4b90-4472-a367-39b8c5242558-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.414108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "04b94227-4b90-4472-a367-39b8c5242558-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.415478] env[61356]: INFO nova.compute.manager [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Terminating instance [ 852.419194] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.419352] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquired lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.419517] env[61356]: DEBUG nova.network.neutron [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 852.428212] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 852.460154] env[61356]: DEBUG nova.network.neutron [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 852.502586] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.502586] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.502586] env[61356]: INFO nova.compute.claims [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 852.549497] env[61356]: DEBUG nova.compute.manager [req-8fa432be-f0d5-4dfd-9987-3268397742de req-fc694e1e-db65-47aa-8925-33ed3eee7286 service nova] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Received event network-vif-deleted-f3162b4a-5155-47f7-baf8-391b7928336b {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 852.690558] env[61356]: DEBUG nova.network.neutron [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.708251] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Releasing lock "refresh_cache-04b94227-4b90-4472-a367-39b8c5242558" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.708251] env[61356]: DEBUG nova.compute.manager [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 852.708409] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Powering off the VM {{(pid=61356) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 852.708828] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4195836d-875a-45ec-b185-b4169075870f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.716676] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 852.716676] env[61356]: value = "task-4302535" [ 852.716676] env[61356]: _type = "Task" [ 852.716676] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.732749] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302535, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.013199] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9826c02-4663-4ab9-a570-1180728d5fc7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.020291] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcebe392-2a27-4538-af86-c9a4600f9d72 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.052367] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08e5aee-fe33-44fa-9311-2c01fe2734fd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.062728] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d65fecf-d965-43ba-b9ff-64c45e62609a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.089208] env[61356]: DEBUG nova.compute.provider_tree [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.097554] env[61356]: DEBUG nova.scheduler.client.report [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 853.112601] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.612s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.113817] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 853.158080] env[61356]: DEBUG nova.compute.utils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.161040] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 853.161040] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 853.168576] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 853.225017] env[61356]: DEBUG nova.policy [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '587a0d4b02d943cabc2bcb68ca46deef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adc7029407f041e29457cffebb21457d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 853.229962] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302535, 'name': PowerOffVM_Task, 'duration_secs': 0.178071} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.230389] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Powered off the VM {{(pid=61356) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 853.230935] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Volume detach. Driver type: vmdk {{(pid=61356) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 853.231405] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-846967', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'name': 'volume-fae2bb99-d83f-4362-a10f-82a2bcf68133', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '04b94227-4b90-4472-a367-39b8c5242558', 'attached_at': '', 'detached_at': '', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'serial': 'fae2bb99-d83f-4362-a10f-82a2bcf68133'} {{(pid=61356) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 853.232699] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f562c4-c7ab-4510-90e2-8bf645758f2c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.254446] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 853.257506] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9330e4d3-c1db-4fd6-bb8d-ace41b5e0af5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.270248] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705a6d00-5a39-47a6-81dc-e1a708e0970a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.294076] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d154d0d-e449-412e-b17f-7a7c4c96f278 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.310075] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] The volume has not been displaced from its original location: [datastore2] volume-fae2bb99-d83f-4362-a10f-82a2bcf68133/volume-fae2bb99-d83f-4362-a10f-82a2bcf68133.vmdk. No consolidation needed. {{(pid=61356) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 853.315969] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Reconfiguring VM instance instance-0000001f to detach disk 2000 {{(pid=61356) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 853.318347] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 853.318728] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 853.318998] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.319468] env[61356]: DEBUG 
nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 853.319811] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.320118] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 853.320433] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 853.320701] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 853.322566] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 853.322566] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 853.322566] env[61356]: DEBUG nova.virt.hardware [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 853.322566] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9d3586a-711f-48b8-b640-6c015888036b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.334948] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1729cc2-e684-4ebb-b7d2-7906fcf13c6d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.343661] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38097d8-f99f-4f5c-8bf9-d612184b94f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.348350] 
env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 853.348350] env[61356]: value = "task-4302536" [ 853.348350] env[61356]: _type = "Task" [ 853.348350] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.362505] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302536, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.578060] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Successfully created port: ce53405c-aad1-45f9-a99b-d54c0a5f75e9 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.858824] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302536, 'name': ReconfigVM_Task, 'duration_secs': 0.170004} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.858824] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Reconfigured VM instance instance-0000001f to detach disk 2000 {{(pid=61356) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 853.863332] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2c9c31f-e450-46da-91a5-c718117cc095 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.878015] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 853.878015] env[61356]: value = "task-4302537" [ 853.878015] env[61356]: _type = "Task" [ 853.878015] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.886180] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302537, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.219066] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "498180c6-cc07-4c5a-bf1e-784148909d6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.219296] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.308458] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Successfully updated port: ce53405c-aad1-45f9-a99b-d54c0a5f75e9 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.332179] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "refresh_cache-2d2dca63-8ab0-4420-90b0-38a330df8271" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.332328] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquired lock "refresh_cache-2d2dca63-8ab0-4420-90b0-38a330df8271" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.332626] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 854.387337] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.392616] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302537, 'name': ReconfigVM_Task, 'duration_secs': 0.107801} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.393127] env[61356]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-846967', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'name': 'volume-fae2bb99-d83f-4362-a10f-82a2bcf68133', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '04b94227-4b90-4472-a367-39b8c5242558', 'attached_at': '', 'detached_at': '', 'volume_id': 'fae2bb99-d83f-4362-a10f-82a2bcf68133', 'serial': 'fae2bb99-d83f-4362-a10f-82a2bcf68133'} {{(pid=61356) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 854.393667] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 854.394691] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f43a5c-10ec-4c06-b5d9-1429bce31e6a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.402988] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.402988] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22ee5986-4e9f-4f38-928b-ad40884e6bbd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.486017] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 854.486492] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 854.486741] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Deleting the datastore file [datastore2] 04b94227-4b90-4472-a367-39b8c5242558 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.487048] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9bafb97-371e-491f-8281-dad972876d95 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.493953] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for the task: (returnval){ [ 854.493953] env[61356]: value = "task-4302539" [ 854.493953] env[61356]: _type = "Task" [ 854.493953] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.503311] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302539, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.581291] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Updating instance_info_cache with network_info: [{"id": "ce53405c-aad1-45f9-a99b-d54c0a5f75e9", "address": "fa:16:3e:c3:06:72", "network": {"id": "63264cfa-8dd5-4434-8797-944fbf1220bd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1463108079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adc7029407f041e29457cffebb21457d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce53405c-aa", "ovs_interfaceid": "ce53405c-aad1-45f9-a99b-d54c0a5f75e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.592899] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Releasing lock "refresh_cache-2d2dca63-8ab0-4420-90b0-38a330df8271" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.593217] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance network_info: |[{"id": "ce53405c-aad1-45f9-a99b-d54c0a5f75e9", "address": "fa:16:3e:c3:06:72", "network": {"id": "63264cfa-8dd5-4434-8797-944fbf1220bd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1463108079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adc7029407f041e29457cffebb21457d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce53405c-aa", "ovs_interfaceid": "ce53405c-aad1-45f9-a99b-d54c0a5f75e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 854.593925] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:06:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce53405c-aad1-45f9-a99b-d54c0a5f75e9', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.601356] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Creating folder: Project (adc7029407f041e29457cffebb21457d). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 854.601972] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca4ac5a2-d488-41e2-ba6a-88bf21d91b39 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.612800] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Created folder: Project (adc7029407f041e29457cffebb21457d) in parent group-v846926. [ 854.613044] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Creating folder: Instances. Parent ref: group-v846982. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 854.613284] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fdc0870-eda3-4b82-a97d-b98d10259bc5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.622876] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Created folder: Instances in parent group-v846982. 
[ 854.623579] env[61356]: DEBUG oslo.service.loopingcall [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.623579] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 854.623579] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba2699b5-6e01-4aff-8c10-a1c0d9ba2907 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.649872] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.649872] env[61356]: value = "task-4302542" [ 854.649872] env[61356]: _type = "Task" [ 854.649872] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.661512] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302542, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.832079] env[61356]: DEBUG nova.compute.manager [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Received event network-vif-plugged-ce53405c-aad1-45f9-a99b-d54c0a5f75e9 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 854.832300] env[61356]: DEBUG oslo_concurrency.lockutils [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] Acquiring lock "2d2dca63-8ab0-4420-90b0-38a330df8271-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.832518] env[61356]: DEBUG oslo_concurrency.lockutils [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.832697] env[61356]: DEBUG oslo_concurrency.lockutils [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.832864] env[61356]: DEBUG nova.compute.manager [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] No waiting events found dispatching network-vif-plugged-ce53405c-aad1-45f9-a99b-d54c0a5f75e9 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.833185] env[61356]: WARNING nova.compute.manager [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Received 
unexpected event network-vif-plugged-ce53405c-aad1-45f9-a99b-d54c0a5f75e9 for instance with vm_state building and task_state spawning. [ 854.833372] env[61356]: DEBUG nova.compute.manager [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Received event network-changed-ce53405c-aad1-45f9-a99b-d54c0a5f75e9 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 854.833546] env[61356]: DEBUG nova.compute.manager [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Refreshing instance network info cache due to event network-changed-ce53405c-aad1-45f9-a99b-d54c0a5f75e9. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 854.833747] env[61356]: DEBUG oslo_concurrency.lockutils [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] Acquiring lock "refresh_cache-2d2dca63-8ab0-4420-90b0-38a330df8271" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.833816] env[61356]: DEBUG oslo_concurrency.lockutils [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] Acquired lock "refresh_cache-2d2dca63-8ab0-4420-90b0-38a330df8271" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.833961] env[61356]: DEBUG nova.network.neutron [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Refreshing network info cache for port ce53405c-aad1-45f9-a99b-d54c0a5f75e9 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 855.006529] env[61356]: DEBUG oslo_vmware.api [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Task: {'id': task-4302539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082335} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.006529] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.006529] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 855.006529] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.006529] env[61356]: INFO nova.compute.manager [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Took 2.30 seconds to destroy the instance on the hypervisor. [ 855.006529] env[61356]: DEBUG oslo.service.loopingcall [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.006529] env[61356]: DEBUG nova.compute.manager [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 855.006529] env[61356]: DEBUG nova.network.neutron [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.034656] env[61356]: DEBUG nova.network.neutron [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.044818] env[61356]: DEBUG nova.network.neutron [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.060055] env[61356]: INFO nova.compute.manager [-] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Took 0.06 seconds to deallocate network for instance. [ 855.130914] env[61356]: INFO nova.compute.manager [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Took 0.07 seconds to detach 1 volumes for instance. 
[ 855.135486] env[61356]: DEBUG nova.compute.manager [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Deleting volume: fae2bb99-d83f-4362-a10f-82a2bcf68133 {{(pid=61356) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3239}} [ 855.159735] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302542, 'name': CreateVM_Task, 'duration_secs': 0.370361} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.160255] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 855.162523] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.162523] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.162523] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 855.165164] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c876be54-de31-4a9c-8221-49569d6c04d8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.170373] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Waiting for the task: (returnval){ [ 855.170373] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52bdc47b-0fd0-fba3-8264-e34690c4522c" [ 855.170373] env[61356]: _type = "Task" [ 855.170373] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.177755] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52bdc47b-0fd0-fba3-8264-e34690c4522c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.219524] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.220234] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.220546] env[61356]: DEBUG nova.objects.instance [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lazy-loading 'resources' on Instance uuid 04b94227-4b90-4472-a367-39b8c5242558 {{(pid=61356) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.307483] env[61356]: DEBUG nova.network.neutron [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Updated VIF entry in instance network info cache for port ce53405c-aad1-45f9-a99b-d54c0a5f75e9. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 855.307831] env[61356]: DEBUG nova.network.neutron [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Updating instance_info_cache with network_info: [{"id": "ce53405c-aad1-45f9-a99b-d54c0a5f75e9", "address": "fa:16:3e:c3:06:72", "network": {"id": "63264cfa-8dd5-4434-8797-944fbf1220bd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1463108079-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adc7029407f041e29457cffebb21457d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce53405c-aa", "ovs_interfaceid": "ce53405c-aad1-45f9-a99b-d54c0a5f75e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.318467] env[61356]: DEBUG oslo_concurrency.lockutils [req-a05d8ab4-7912-4197-bd5d-681f18481688 req-b57c2a21-5ebc-40ba-9aa5-7fcc8870e32f service nova] Releasing lock "refresh_cache-2d2dca63-8ab0-4420-90b0-38a330df8271" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
855.550084] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623badd1-aaf5-42c0-9484-97a9bf7aca6a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.559938] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bbfcb6-8366-4e16-b648-88c0511c4dcf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.591374] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcd7298-076f-4b77-9759-dfa10f4219fd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.599909] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c76e39-0dfd-4f6b-b28b-ec21e2bbc80d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.612801] env[61356]: DEBUG nova.compute.provider_tree [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.623132] env[61356]: DEBUG nova.scheduler.client.report [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.644355] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.424s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.680889] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.680889] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.680889] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.701909] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a946a0b-b262-4790-8bb6-52dd34425910 tempest-ServerActionsV293TestJSON-1779699484 tempest-ServerActionsV293TestJSON-1779699484-project-member] Lock "04b94227-4b90-4472-a367-39b8c5242558" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.290s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.757867] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.219224] env[61356]: DEBUG oslo_concurrency.lockutils [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "2d2dca63-8ab0-4420-90b0-38a330df8271" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.758133] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.770898] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.771192] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.771372] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.771534] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 856.772689] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fcc322-97d0-467f-b764-627ac7bd74ab {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.783745] 
env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcbcee3-6ad8-44bc-830c-86d7e4d8ed9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.803536] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc0716e-b851-44cd-a506-78c40c2776b0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.810338] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50662811-0017-4636-b29a-65debe9f98c7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.846332] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180642MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 856.846553] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.846630] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.935682] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 6a2bad10-6948-482a-803a-2da7cf733d49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936246] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936334] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936460] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936588] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936749] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936831] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.936969] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.937103] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.937219] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 856.952733] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance a24ec07d-7e22-4f7e-9e69-9e5f60f505d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.965073] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.979456] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.994363] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.005243] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.017378] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 42716a31-0ced-493f-b7f7-5576ef2babd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.027957] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 79969f15-053a-4567-b705-fbfca33b6ee8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.037567] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b5cebdbc-f815-4b93-ac2a-27f0121966a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.047391] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4053d28b-837f-4e5c-bdbe-10f10cb3ce04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.060278] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f8c5de0-e819-4702-b871-619e221eeb4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.077556] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 612128fe-2d34-46e7-9496-f6c85dcf4ca4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.090583] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance acc5ec07-0d2f-413c-ab18-07876f43d786 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.103339] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance a76f796d-63d2-41ad-b800-629b68c00694 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.113548] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7babcc84-5bde-464c-9ac8-c832cb3887f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.125470] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.125748] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 857.125894] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 857.525234] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14726893-dac7-44c2-aaeb-2238752c1912 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.533055] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e87c7f-07ee-4dd6-8935-bb8db319f2ce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.564457] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2618a1-a939-4386-a5e9-03871d6a0682 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.571900] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daff5adb-6c87-41b5-897e-740557eba108 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.584900] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.593366] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 857.618254] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 857.618522] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.772s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.619550] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.757899] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.758156] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.758316] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.754638] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.757608] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 860.757870] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 861.758713] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.759112] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 861.759112] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 861.786160] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.786337] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.786469] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.786596] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.786725] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.786839] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.786958] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.787215] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.787446] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.787604] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 861.787832] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 863.786654] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.879912] env[61356]: DEBUG oslo_concurrency.lockutils [None req-aadbc6dd-760e-4501-b740-425c00540327 tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] Acquiring lock "798abe6a-1a6b-4844-b61a-fe258e6f1f04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.880257] env[61356]: DEBUG oslo_concurrency.lockutils [None req-aadbc6dd-760e-4501-b740-425c00540327 tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] Lock "798abe6a-1a6b-4844-b61a-fe258e6f1f04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.940726] env[61356]: WARNING oslo_vmware.rw_handles [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 893.940726] env[61356]: ERROR oslo_vmware.rw_handles [ 893.940726] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 893.940726] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 
tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 893.940726] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Copying Virtual Disk [datastore2] vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/7ccc365a-b26e-420e-9277-2c6cb600a13b/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 893.940726] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54a28d78-0401-4be7-8190-233ae5519e49 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.940726] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){ [ 893.940726] env[61356]: value = "task-4302544" [ 893.940726] env[61356]: _type = "Task" [ 893.940726] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.940726] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': task-4302544, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.940726] env[61356]: DEBUG oslo_vmware.exceptions [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 893.940726] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.940726] env[61356]: ERROR nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 893.940726] env[61356]: Faults: ['InvalidArgument'] [ 893.940726] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Traceback (most recent call last): [ 893.940726] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 893.940726] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] yield resources [ 893.940726] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 893.940726] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self.driver.spawn(context, instance, image_meta, [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self._vmops.spawn(context, instance, image_meta, injected_files, [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self._fetch_image_if_missing(context, vi) [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] image_cache(vi, tmp_image_ds_loc) [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] vm_util.copy_virtual_disk( [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] session._wait_for_task(vmdk_copy_task) [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] return self.wait_for_task(task_ref) [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] return evt.wait() [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] result = hub.switch() [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] return self.greenlet.switch() [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self.f(*self.args, **self.kw) [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] raise exceptions.translate_fault(task_info.error) [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Faults: ['InvalidArgument'] [ 893.942147] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] [ 893.942147] env[61356]: INFO nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Terminating instance [ 893.942147] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.942147] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.942147] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2874b50-7ad7-41dd-8375-750a13d370d9 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.942147] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.942147] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771f044e-6391-40d2-be61-118d3ce45e3e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Waiting for the task: (returnval){ [ 893.943115] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52315628-f4bf-1434-228a-2acb3f4b86eb" [ 893.943115] env[61356]: _type = "Task" [ 893.943115] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.943115] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52315628-f4bf-1434-228a-2acb3f4b86eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.943115] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 893.943115] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Creating directory with path [datastore2] vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d62553c6-c342-4673-a76a-cf69dc7b23f5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Created directory with path [datastore2] vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.943115] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Fetch image to [datastore2] vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 893.943115] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39995aeb-ffee-4bed-ab3e-c03cf5d0620d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82919586-4353-4b75-bf6b-75b6543f955d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e6f8da-36a6-4344-b24d-388b9099aba8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c026b4c0-5499-459c-99fa-c6058545550a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b1ef8cfd-b07a-4c56-9dd5-10108d1affa1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.943115] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 893.943747] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 893.943747] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.943747] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0cef75-b36c-43ed-8f95-139bbfbeab4a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.953639] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 893.953893] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4940152-95dd-42bb-8714-36ce4874a0a7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.021263] env[61356]: DEBUG oslo_vmware.rw_handles [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 894.083435] env[61356]: DEBUG oslo_vmware.rw_handles [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Completed reading data from the image iterator. 
{{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 894.083642] env[61356]: DEBUG oslo_vmware.rw_handles [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 894.135743] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 894.135971] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 894.136169] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Deleting the datastore file [datastore2] 6a2bad10-6948-482a-803a-2da7cf733d49 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.136434] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd83e748-7f93-433e-9fe1-ee79bfd1bd43 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.143106] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){ [ 894.143106] env[61356]: value = "task-4302546" [ 894.143106] env[61356]: _type = "Task" [ 894.143106] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.151226] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': task-4302546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.653850] env[61356]: DEBUG oslo_vmware.api [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': task-4302546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08524} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.654365] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.654574] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 894.654748] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 894.655142] env[61356]: INFO nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Took 0.71 seconds to destroy the instance on the hypervisor. [ 894.657454] env[61356]: DEBUG nova.compute.claims [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 894.657634] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.658387] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.134377] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81db9670-0081-403a-9aeb-871465b14b8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.141574] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1116609-88c7-4a43-8edd-64a0e39fbb7b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.171043] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1675106-523a-4a9e-9abc-29998329489f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.178417] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-13b9b521-e89f-441c-99be-a93a94a44b93 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.191227] env[61356]: DEBUG nova.compute.provider_tree [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.200487] env[61356]: DEBUG nova.scheduler.client.report [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.217484] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.559s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.218022] env[61356]: ERROR nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 895.218022] env[61356]: Faults: ['InvalidArgument'] [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Traceback (most recent call last): [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self.driver.spawn(context, instance, image_meta, [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self._vmops.spawn(context, instance, image_meta, injected_files, [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self._fetch_image_if_missing(context, vi) [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 
6a2bad10-6948-482a-803a-2da7cf733d49] image_cache(vi, tmp_image_ds_loc) [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] vm_util.copy_virtual_disk( [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] session._wait_for_task(vmdk_copy_task) [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] return self.wait_for_task(task_ref) [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] return evt.wait() [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] result = hub.switch() [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] return self.greenlet.switch() [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] self.f(*self.args, **self.kw) [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] raise exceptions.translate_fault(task_info.error) [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Faults: ['InvalidArgument'] [ 895.218022] env[61356]: ERROR nova.compute.manager [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] [ 895.219025] env[61356]: DEBUG nova.compute.utils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 895.220173] env[61356]: DEBUG nova.compute.manager [None 
req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Build of instance 6a2bad10-6948-482a-803a-2da7cf733d49 was re-scheduled: A specified parameter was not correct: fileType [ 895.220173] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 895.220553] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 895.220723] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 895.220885] env[61356]: DEBUG nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 895.221057] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.568859] env[61356]: DEBUG nova.network.neutron [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.617045] env[61356]: INFO nova.compute.manager [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Took 1.40 seconds to deallocate network for instance. 
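The traceback above is oslo.vmware's task-polling pattern surfacing a vCenter fault: copy_virtual_disk starts a CopyVirtualDisk_Task, _wait_for_task polls it through a looping call, and a task that finishes in the 'error' state is translated into a typed exception, here a VimFaultException carrying the 'InvalidArgument' fault and the message "A specified parameter was not correct: fileType". A minimal sketch of that poll-and-translate loop, with get_task_info, fault_name and localized_message as hypothetical stand-ins for the real oslo.vmware and vSphere structures, not their actual APIs:

import time

class VimFaultException(Exception):
    # Simplified stand-in for oslo_vmware.exceptions.VimFaultException.
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    # Poll the vSphere task object until it reaches a terminal state;
    # get_task_info is assumed to return an object with .state and .error.
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info
        if info.state == 'error':
            # The real code maps the server-side fault name onto an
            # exception class; 'InvalidArgument' is what the log shows.
            raise VimFaultException([info.error.fault_name],
                                    info.error.localized_message)
        time.sleep(poll_interval)

Once the exception propagates out of spawn(), the compute manager aborts the resource claim, marks the build for rescheduling and deallocates the instance's networking, which is the sequence the entries from 895.217484 through 896.617045 record.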
[ 896.840232] env[61356]: INFO nova.scheduler.client.report [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Deleted allocations for instance 6a2bad10-6948-482a-803a-2da7cf733d49 [ 896.974790] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f28f4676-018e-4b1d-879b-85334a3d8a7f tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "6a2bad10-6948-482a-803a-2da7cf733d49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 353.215s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.976309] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "6a2bad10-6948-482a-803a-2da7cf733d49" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 154.471s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.976381] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.976536] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.976703] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "6a2bad10-6948-482a-803a-2da7cf733d49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.979161] env[61356]: INFO nova.compute.manager [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Terminating instance [ 896.980918] env[61356]: DEBUG nova.compute.manager [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 896.981161] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.981844] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08343b1f-7a37-48a5-82f3-ab5996d88358 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.992368] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99098482-16ee-4029-ac38-cb92ff4d19b8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.003912] env[61356]: DEBUG nova.compute.manager [None req-455efbe1-2995-4bd4-9dd7-1584e0087a32 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: a24ec07d-7e22-4f7e-9e69-9e5f60f505d1] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 897.025670] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6a2bad10-6948-482a-803a-2da7cf733d49 could not be found. [ 897.025670] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 897.031232] env[61356]: INFO nova.compute.manager [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Took 0.04 seconds to destroy the instance on the hypervisor. [ 897.031232] env[61356]: DEBUG oslo.service.loopingcall [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.031232] env[61356]: DEBUG nova.compute.manager [-] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 897.031232] env[61356]: DEBUG nova.network.neutron [-] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 897.042937] env[61356]: DEBUG nova.compute.manager [None req-455efbe1-2995-4bd4-9dd7-1584e0087a32 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] [instance: a24ec07d-7e22-4f7e-9e69-9e5f60f505d1] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 897.052156] env[61356]: DEBUG nova.network.neutron [-] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.060919] env[61356]: INFO nova.compute.manager [-] [instance: 6a2bad10-6948-482a-803a-2da7cf733d49] Took 0.03 seconds to deallocate network for instance. [ 897.066707] env[61356]: DEBUG oslo_concurrency.lockutils [None req-455efbe1-2995-4bd4-9dd7-1584e0087a32 tempest-DeleteServersAdminTestJSON-1657816063 tempest-DeleteServersAdminTestJSON-1657816063-project-member] Lock "a24ec07d-7e22-4f7e-9e69-9e5f60f505d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.443s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.075655] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 897.222481] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.222481] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.226408] env[61356]: INFO nova.compute.claims [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.400413] env[61356]: DEBUG oslo_concurrency.lockutils [None req-4703a8b8-7042-41a7-b92e-9a141dd398a2 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "6a2bad10-6948-482a-803a-2da7cf733d49" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.424s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.747850] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c881b65-6764-4eec-9cb7-f4266509c0a5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.756258] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a1f202-062f-41a5-9d35-5dc8b7626dcd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.786937] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a95b28f-91ba-4954-acad-7836e6fb5437 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.796179] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2095e052-aca4-4a84-a7b6-e598c05014ce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.810803] env[61356]: DEBUG nova.compute.provider_tree [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.820363] env[61356]: DEBUG nova.scheduler.client.report [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 897.846289] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.625s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.846815] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 897.913963] env[61356]: DEBUG nova.compute.utils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.915606] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Allocating IP information in the background. 
{{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 897.915931] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 897.929737] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 898.065813] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 898.113947] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:45:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='50a85c44-ee21-45a5-af94-aff3152340e2',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-2140625328',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.114237] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.114400] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.114585] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.114732] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.114914] env[61356]: DEBUG 
nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.115127] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.115293] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.115459] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.115653] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.115846] env[61356]: DEBUG nova.virt.hardware [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.116734] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dafaf73-0e25-40b6-b186-7662fb1167e5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.124988] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafedbb8-f10a-4116-8d9b-7b1ce00da95f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.129819] env[61356]: DEBUG nova.policy [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88792820aa3441e986522ccf9b2f7a43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9e03c4102e846819717689589fd7107', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 899.260448] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 
22e72346-49c6-4044-ae29-96679d4f2253] Successfully created port: 52162d72-6417-4793-981b-1d2594a786d4 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.971518] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Successfully updated port: 52162d72-6417-4793-981b-1d2594a786d4 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 901.140766] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "refresh_cache-22e72346-49c6-4044-ae29-96679d4f2253" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.140917] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired lock "refresh_cache-22e72346-49c6-4044-ae29-96679d4f2253" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.141087] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.205092] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.549744] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Updating instance_info_cache with network_info: [{"id": "52162d72-6417-4793-981b-1d2594a786d4", "address": "fa:16:3e:ad:7f:98", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52162d72-64", "ovs_interfaceid": "52162d72-6417-4793-981b-1d2594a786d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.582381] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Releasing lock "refresh_cache-22e72346-49c6-4044-ae29-96679d4f2253" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.582381] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance network_info: |[{"id": "52162d72-6417-4793-981b-1d2594a786d4", "address": "fa:16:3e:ad:7f:98", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52162d72-64", "ovs_interfaceid": "52162d72-6417-4793-981b-1d2594a786d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.582381] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None 
req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:7f:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52162d72-6417-4793-981b-1d2594a786d4', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.594372] env[61356]: DEBUG oslo.service.loopingcall [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.594918] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 901.595241] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3f37c89-536e-4ced-800e-54503254b4be {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.616986] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.616986] env[61356]: value = "task-4302547" [ 901.616986] env[61356]: _type = "Task" [ 901.616986] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.625101] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302547, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.775432] env[61356]: DEBUG nova.compute.manager [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Received event network-vif-plugged-52162d72-6417-4793-981b-1d2594a786d4 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 901.775432] env[61356]: DEBUG oslo_concurrency.lockutils [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] Acquiring lock "22e72346-49c6-4044-ae29-96679d4f2253-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.775432] env[61356]: DEBUG oslo_concurrency.lockutils [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] Lock "22e72346-49c6-4044-ae29-96679d4f2253-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.775432] env[61356]: DEBUG oslo_concurrency.lockutils [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] Lock "22e72346-49c6-4044-ae29-96679d4f2253-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.775432] env[61356]: DEBUG nova.compute.manager [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] No waiting events found dispatching network-vif-plugged-52162d72-6417-4793-981b-1d2594a786d4 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 901.775432] env[61356]: WARNING nova.compute.manager [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Received unexpected event network-vif-plugged-52162d72-6417-4793-981b-1d2594a786d4 for instance with vm_state building and task_state spawning. [ 901.775966] env[61356]: DEBUG nova.compute.manager [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Received event network-changed-52162d72-6417-4793-981b-1d2594a786d4 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 901.776214] env[61356]: DEBUG nova.compute.manager [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Refreshing instance network info cache due to event network-changed-52162d72-6417-4793-981b-1d2594a786d4. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 901.777040] env[61356]: DEBUG oslo_concurrency.lockutils [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] Acquiring lock "refresh_cache-22e72346-49c6-4044-ae29-96679d4f2253" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.777040] env[61356]: DEBUG oslo_concurrency.lockutils [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] Acquired lock "refresh_cache-22e72346-49c6-4044-ae29-96679d4f2253" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.777040] env[61356]: DEBUG nova.network.neutron [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Refreshing network info cache for port 52162d72-6417-4793-981b-1d2594a786d4 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 902.131543] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302547, 'name': CreateVM_Task} progress is 25%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.179568] env[61356]: DEBUG nova.network.neutron [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Updated VIF entry in instance network info cache for port 52162d72-6417-4793-981b-1d2594a786d4. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 902.179933] env[61356]: DEBUG nova.network.neutron [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Updating instance_info_cache with network_info: [{"id": "52162d72-6417-4793-981b-1d2594a786d4", "address": "fa:16:3e:ad:7f:98", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.167", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52162d72-64", "ovs_interfaceid": "52162d72-6417-4793-981b-1d2594a786d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.190367] env[61356]: DEBUG oslo_concurrency.lockutils [req-3abe892b-e65c-4c5b-9ae1-7ab948395918 req-21fd38f5-c7f8-44f4-938e-3a71f1c59d43 service nova] Releasing lock "refresh_cache-22e72346-49c6-4044-ae29-96679d4f2253" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.633028] env[61356]: DEBUG 
oslo_vmware.api [-] Task: {'id': task-4302547, 'name': CreateVM_Task, 'duration_secs': 0.649258} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.633508] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 902.634362] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.635292] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.635292] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.635292] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03bb2650-98ed-40ab-a00b-3d66c64f7be4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.641338] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 902.641338] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52668940-8025-79a9-cff3-43f61bed443b" [ 902.641338] env[61356]: _type = "Task" [ 902.641338] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.651502] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52668940-8025-79a9-cff3-43f61bed443b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.154021] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.154021] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.154021] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.281212] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.281497] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.759031] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.770530] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.770773] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.770950] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.771127] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 916.772415] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d025e199-d973-4290-b09f-38c3626fc636 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.781981] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb11ace1-601a-4835-84ea-a7d4e98a519d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.796256] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff4e0e7-66d3-4c81-9e7a-c02f256d791f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.802843] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2222721-9b1e-41ca-8397-5aaa3388d0bc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.833283] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180687MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 916.833375] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.833582] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.908125] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.908299] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.908427] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.908548] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.908667] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.908783] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.908895] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.909015] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.909137] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.909254] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 916.921329] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.932621] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.943064] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.953662] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 42716a31-0ced-493f-b7f7-5576ef2babd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.963936] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 79969f15-053a-4567-b705-fbfca33b6ee8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.974555] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b5cebdbc-f815-4b93-ac2a-27f0121966a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.984826] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4053d28b-837f-4e5c-bdbe-10f10cb3ce04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.995024] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f8c5de0-e819-4702-b871-619e221eeb4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.005133] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 612128fe-2d34-46e7-9496-f6c85dcf4ca4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.015323] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance acc5ec07-0d2f-413c-ab18-07876f43d786 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.025243] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance a76f796d-63d2-41ad-b800-629b68c00694 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.036540] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7babcc84-5bde-464c-9ac8-c832cb3887f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.047584] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.057900] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 798abe6a-1a6b-4844-b61a-fe258e6f1f04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.067367] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.067615] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 917.067762] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 917.365895] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16df9de-49c4-4f97-b185-5c51f6f9a7b9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.373406] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6ae33c-9fc1-4977-b00b-0e547e1d1314 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.404324] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4166038-6b49-4840-9352-85a1bd3a114a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.411243] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c7a890-3f73-4706-b76d-22a3a069c89d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.424081] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.432288] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 917.446197] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 917.446262] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.613s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.446762] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.758562] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.758562] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.758096] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.758434] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.758656] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.758935] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 922.758302] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.758489] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 922.758622] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 922.784684] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.784966] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785187] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785337] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785464] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785583] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785702] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785817] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.785932] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.786058] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 922.786237] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 923.781576] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.495589] env[61356]: WARNING oslo_vmware.rw_handles [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 940.495589] env[61356]: ERROR oslo_vmware.rw_handles [ 940.496298] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 940.498054] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 940.498317] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Copying Virtual Disk [datastore2] vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/17845b7e-de00-4b29-81cb-e6dce86801a4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 940.498586] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-163c580c-dbde-4692-b141-9033bb668df2 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.506847] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Waiting for the task: (returnval){ [ 940.506847] env[61356]: value = "task-4302548" [ 940.506847] env[61356]: _type = "Task" [ 940.506847] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.514161] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Task: {'id': task-4302548, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.016791] env[61356]: DEBUG oslo_vmware.exceptions [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 941.017223] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.017742] env[61356]: ERROR nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.017742] env[61356]: Faults: ['InvalidArgument'] [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Traceback (most recent call last): [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] yield resources [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self.driver.spawn(context, instance, image_meta, [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] 
self._fetch_image_if_missing(context, vi) [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] image_cache(vi, tmp_image_ds_loc) [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] vm_util.copy_virtual_disk( [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] session._wait_for_task(vmdk_copy_task) [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] return self.wait_for_task(task_ref) [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] return evt.wait() [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] result = hub.switch() [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] return self.greenlet.switch() [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self.f(*self.args, **self.kw) [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] raise exceptions.translate_fault(task_info.error) [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Faults: ['InvalidArgument'] [ 941.017742] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] [ 941.018739] env[61356]: INFO nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 
tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Terminating instance [ 941.020657] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 941.020859] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.021145] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.021353] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.022084] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc9184a-00e3-4001-85ca-1a89eef904e7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.024671] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94f2136d-f1fe-4c1f-ae09-43dea18e697a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.030405] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.030625] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42fa1309-3e39-443f-bbb5-204d9e04bf48 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.032724] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.032898] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 941.034459] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c25240c-32eb-4cbf-9df4-46645e6ef461 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.039054] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Waiting for the task: (returnval){ [ 941.039054] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52d72296-d085-f2a8-9a34-b776df7b45c0" [ 941.039054] env[61356]: _type = "Task" [ 941.039054] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.045118] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52d72296-d085-f2a8-9a34-b776df7b45c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.099469] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.099549] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.099776] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Deleting the datastore file [datastore2] b35cffdf-8926-4a6b-8118-48535e8cb8c0 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.100041] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a718d18-4321-48ea-8616-a22305ffbce9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.105916] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Waiting for the task: (returnval){ [ 941.105916] env[61356]: value = "task-4302550" [ 941.105916] env[61356]: _type = "Task" [ 941.105916] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.114792] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Task: {'id': task-4302550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.548540] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 941.548819] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Creating directory with path [datastore2] vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.549047] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b14bd285-c7b0-4d31-a5cf-636ccd51d1b0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.560008] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Created directory with path [datastore2] vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.560266] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Fetch image to [datastore2] vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 941.560448] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 941.561189] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a8fa69-ff8e-45c6-81cf-bc2bf9c22cc0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.568041] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1e0427-205b-4bd5-82fa-f75d07389a6f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.576635] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bded0d00-faa2-4049-ae5f-db0be0d773d0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.610068] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba1e66b-cd08-43f8-85f0-c78fac94ab75 {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.616656] env[61356]: DEBUG oslo_vmware.api [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Task: {'id': task-4302550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076692} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.618850] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.619067] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.619246] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.619424] env[61356]: INFO nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 941.621581] env[61356]: DEBUG nova.compute.claims [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 941.621766] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.622014] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.625635] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-007772e1-b33c-408d-8173-c7be6bfa23e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.646248] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 941.714494] env[61356]: DEBUG oslo_vmware.rw_handles [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 941.772945] env[61356]: DEBUG oslo_vmware.rw_handles [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 941.773147] env[61356]: DEBUG oslo_vmware.rw_handles [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 942.017541] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d41fbcd-88ee-4717-b5d7-488e09b244a9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.025857] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940a3854-5c8b-455f-b44e-772634dfed7b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.056681] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da3ab12-9c7d-4a47-a1af-ef5cde41c49d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.063789] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c332b61c-39f0-4e4c-8417-bbb5d9bb95f0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.077060] env[61356]: DEBUG nova.compute.provider_tree [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.085563] env[61356]: DEBUG nova.scheduler.client.report [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.099866] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.478s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.101189] env[61356]: ERROR nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.101189] env[61356]: Faults: ['InvalidArgument'] [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Traceback (most recent call last): [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 942.101189] env[61356]: ERROR 
nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self.driver.spawn(context, instance, image_meta, [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self._fetch_image_if_missing(context, vi) [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] image_cache(vi, tmp_image_ds_loc) [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] vm_util.copy_virtual_disk( [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] session._wait_for_task(vmdk_copy_task) [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] return self.wait_for_task(task_ref) [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] return evt.wait() [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] result = hub.switch() [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] return self.greenlet.switch() [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] self.f(*self.args, **self.kw) [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] raise exceptions.translate_fault(task_info.error) [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Faults: ['InvalidArgument'] [ 942.101189] env[61356]: ERROR nova.compute.manager [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] [ 942.103511] env[61356]: DEBUG nova.compute.utils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 942.103662] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Build of instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 was re-scheduled: A specified parameter was not correct: fileType [ 942.103662] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 942.104236] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 942.104476] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 942.104696] env[61356]: DEBUG nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 942.104920] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.467284] env[61356]: DEBUG nova.network.neutron [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.467284] env[61356]: INFO nova.compute.manager [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Took 0.36 seconds to deallocate network for instance. [ 942.636242] env[61356]: INFO nova.scheduler.client.report [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Deleted allocations for instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 [ 942.664288] env[61356]: DEBUG oslo_concurrency.lockutils [None req-33aad2d3-b163-4a88-bbc8-e0592a3b8df4 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 379.379s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.665801] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 178.075s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.666198] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Acquiring lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.666547] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.666846] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.669670] env[61356]: INFO nova.compute.manager [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Terminating instance [ 942.671721] env[61356]: DEBUG nova.compute.manager [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 942.671978] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 942.672509] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1851bf39-e797-40d9-9c98-0b6b4d93017f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.681932] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa63f39-78fd-496c-8c33-ab18727f355e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.694785] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 942.719807] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b35cffdf-8926-4a6b-8118-48535e8cb8c0 could not be found. 
[ 942.720742] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 942.720742] env[61356]: INFO nova.compute.manager [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Took 0.05 seconds to destroy the instance on the hypervisor. [ 942.720742] env[61356]: DEBUG oslo.service.loopingcall [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.720742] env[61356]: DEBUG nova.compute.manager [-] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 942.720742] env[61356]: DEBUG nova.network.neutron [-] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.746533] env[61356]: DEBUG nova.network.neutron [-] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.751025] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.751025] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.751025] env[61356]: INFO nova.compute.claims [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.756927] env[61356]: INFO nova.compute.manager [-] [instance: b35cffdf-8926-4a6b-8118-48535e8cb8c0] Took 0.04 seconds to deallocate network for instance. 
[ 942.860740] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f747d34-ef14-4e49-abd8-97f3b5c7cd82 tempest-ServerExternalEventsTest-693822474 tempest-ServerExternalEventsTest-693822474-project-member] Lock "b35cffdf-8926-4a6b-8118-48535e8cb8c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.195s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.107477] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4edf0eb-3811-4fef-a017-e45c772801f6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.115554] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a664f0-afa0-48cc-8fc0-ac1ae4735d16 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.146436] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a32252-581d-4a10-8292-787c27c3e6f3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.154170] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5a1001-9746-48e7-b86b-8806186957a1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.167385] env[61356]: DEBUG nova.compute.provider_tree [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.178101] env[61356]: DEBUG nova.scheduler.client.report [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.193027] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.444s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.193316] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 943.231344] env[61356]: DEBUG nova.compute.utils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.233256] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 943.234543] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 943.247133] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 943.312233] env[61356]: DEBUG nova.policy [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad4d77a761b64f15b7d009f02e7c3372', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41238e1d762b430b92dc4b220759c2b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 943.319727] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 943.347709] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.348135] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.348135] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.348314] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.348424] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.348570] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.348777] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.348927] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.349099] 
env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.349261] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.349432] env[61356]: DEBUG nova.virt.hardware [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.350305] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734d808c-ad8d-4289-8863-bd98cd9f5166 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.358982] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525f3079-1878-4f36-a837-8527280de67c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.641456] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Successfully created port: 7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.555251] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Successfully updated port: 7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.571358] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "refresh_cache-4ba1ce81-6bb5-457c-8a8b-f91da2b71684" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.571612] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquired lock "refresh_cache-4ba1ce81-6bb5-457c-8a8b-f91da2b71684" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.571704] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.620313] env[61356]: DEBUG nova.network.neutron [None 
req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 944.791965] env[61356]: DEBUG nova.compute.manager [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Received event network-vif-plugged-7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 944.792276] env[61356]: DEBUG oslo_concurrency.lockutils [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] Acquiring lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.792406] env[61356]: DEBUG oslo_concurrency.lockutils [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.792593] env[61356]: DEBUG oslo_concurrency.lockutils [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.792708] env[61356]: DEBUG nova.compute.manager [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] No waiting events found dispatching network-vif-plugged-7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 944.792889] env[61356]: WARNING nova.compute.manager [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Received unexpected event network-vif-plugged-7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 for instance with vm_state building and task_state spawning. [ 944.793044] env[61356]: DEBUG nova.compute.manager [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Received event network-changed-7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 944.793435] env[61356]: DEBUG nova.compute.manager [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Refreshing instance network info cache due to event network-changed-7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6.
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 944.793435] env[61356]: DEBUG oslo_concurrency.lockutils [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] Acquiring lock "refresh_cache-4ba1ce81-6bb5-457c-8a8b-f91da2b71684" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.840035] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Updating instance_info_cache with network_info: [{"id": "7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6", "address": "fa:16:3e:a9:f7:45", "network": {"id": "31576b77-b992-4126-8bc6-39bf54e39b76", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-555839202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41238e1d762b430b92dc4b220759c2b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a4dc3e6-16", "ovs_interfaceid": "7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.855235] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Releasing lock "refresh_cache-4ba1ce81-6bb5-457c-8a8b-f91da2b71684" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.855573] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance network_info: |[{"id": "7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6", "address": "fa:16:3e:a9:f7:45", "network": {"id": "31576b77-b992-4126-8bc6-39bf54e39b76", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-555839202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41238e1d762b430b92dc4b220759c2b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a4dc3e6-16", "ovs_interfaceid": "7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 944.855868] env[61356]: DEBUG oslo_concurrency.lockutils [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] Acquired lock "refresh_cache-4ba1ce81-6bb5-457c-8a8b-f91da2b71684" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.856059] env[61356]: DEBUG nova.network.neutron [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Refreshing network info cache for port 7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 944.857192] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:f7:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8cb478a6-872c-4a90-a8db-526b374e82ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 944.864929] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Creating folder: Project (41238e1d762b430b92dc4b220759c2b7). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 944.866093] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5e5aa46-7e92-4f1f-b362-df333657cf6f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.880087] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Created folder: Project (41238e1d762b430b92dc4b220759c2b7) in parent group-v846926. [ 944.880272] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Creating folder: Instances. Parent ref: group-v846986. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 944.880522] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6418d4bb-bb64-4a21-8c0a-dcb25c2296d4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.889234] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Created folder: Instances in parent group-v846986. 
[ 944.889466] env[61356]: DEBUG oslo.service.loopingcall [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.889649] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 944.889844] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da4f58ac-9948-4b1f-b42a-ba26930c8e72 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.910499] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 944.910499] env[61356]: value = "task-4302553" [ 944.910499] env[61356]: _type = "Task" [ 944.910499] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.919083] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302553, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.172342] env[61356]: DEBUG nova.network.neutron [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Updated VIF entry in instance network info cache for port 7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 945.172924] env[61356]: DEBUG nova.network.neutron [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Updating instance_info_cache with network_info: [{"id": "7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6", "address": "fa:16:3e:a9:f7:45", "network": {"id": "31576b77-b992-4126-8bc6-39bf54e39b76", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-555839202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41238e1d762b430b92dc4b220759c2b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a4dc3e6-16", "ovs_interfaceid": "7a4dc3e6-16a0-407b-b3f2-e7b962a25fa6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.183302] env[61356]: DEBUG oslo_concurrency.lockutils [req-b3e8a28e-57ab-4913-b45f-84d7a685a8c1 req-dfa52e58-a728-47f5-ae9a-cf2627e13917 service nova] Releasing lock "refresh_cache-4ba1ce81-6bb5-457c-8a8b-f91da2b71684" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.420968] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302553, 'name': CreateVM_Task, 'duration_secs': 0.294432} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.421271] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 945.421789] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.421969] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.422405] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 945.422528] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bde70bca-cb7b-4b4d-8b3f-0dac599b8736 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.426833] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Waiting for the task: (returnval){ [ 945.426833] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5291195e-63fa-5aa2-7752-cff58c721eb6" [ 945.426833] env[61356]: _type = "Task" [ 945.426833] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.434084] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5291195e-63fa-5aa2-7752-cff58c721eb6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.692316] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "22e72346-49c6-4044-ae29-96679d4f2253" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.937099] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.937380] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 945.937636] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.308171] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.308577] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.645384] env[61356]: DEBUG oslo_concurrency.lockutils [None req-80bc413d-f36b-4f2b-9f7f-f5f2d18e31fd tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "2e4a6bd2-64d8-4c48-81c6-b3c749e4c270" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.645758] env[61356]: DEBUG oslo_concurrency.lockutils [None req-80bc413d-f36b-4f2b-9f7f-f5f2d18e31fd tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "2e4a6bd2-64d8-4c48-81c6-b3c749e4c270" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.094595] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ac262828-21f1-48c8-92e2-7aa69934fad6 tempest-InstanceActionsV221TestJSON-1486762965 tempest-InstanceActionsV221TestJSON-1486762965-project-member] Acquiring lock "2186aafa-5fd5-4b61-84f1-a694e20724dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.094595] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ac262828-21f1-48c8-92e2-7aa69934fad6 tempest-InstanceActionsV221TestJSON-1486762965 tempest-InstanceActionsV221TestJSON-1486762965-project-member] Lock "2186aafa-5fd5-4b61-84f1-a694e20724dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.758488] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.759026] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.759279] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 978.777229] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.777229] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.777229] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.777229] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 978.778766] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-90b946f5-7ac8-42a8-a837-e988214dbc25 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.790405] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93b15c5-628d-4f6f-9d1f-bd2c1e132839 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.813029] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48afedd-a3ac-42c3-b29e-c2ad167cb42b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.820795] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5f8128-0a5d-4b58-a169-b0cc62a312b4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.850627] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180697MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 978.850731] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.850963] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.951087] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance eed82b3f-6590-4677-a4a2-680d769400fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951087] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951256] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951286] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951377] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951491] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951612] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.951730] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.952032] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.952032] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 978.971806] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 978.990062] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.003775] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 42716a31-0ced-493f-b7f7-5576ef2babd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.015424] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 79969f15-053a-4567-b705-fbfca33b6ee8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.033154] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b5cebdbc-f815-4b93-ac2a-27f0121966a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.046326] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4053d28b-837f-4e5c-bdbe-10f10cb3ce04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.064342] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f8c5de0-e819-4702-b871-619e221eeb4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.087282] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 612128fe-2d34-46e7-9496-f6c85dcf4ca4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.097116] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance acc5ec07-0d2f-413c-ab18-07876f43d786 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.111289] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance a76f796d-63d2-41ad-b800-629b68c00694 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.125244] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7babcc84-5bde-464c-9ac8-c832cb3887f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.143920] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.161117] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 798abe6a-1a6b-4844-b61a-fe258e6f1f04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.181068] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.193207] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.205568] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2e4a6bd2-64d8-4c48-81c6-b3c749e4c270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.217585] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2186aafa-5fd5-4b61-84f1-a694e20724dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.217870] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 979.218160] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 979.663693] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4075a328-d048-4b1e-af45-fef7f6b0dbaf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.671651] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd2de80-a928-4cc6-b535-f45cd0774a4a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.701398] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce9a813-1fcc-4075-9b2b-c42c93eef798 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.708724] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8dae462-641f-44d2-9dfa-162c2405df24 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.721933] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.733752] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.750350] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 979.750565] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.900s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.747657] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.773994] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.758708] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.758950] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.758950] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.759101] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 982.691376] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d04e7ed3-b17c-44cd-8d64-91eab9b77bad tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] Acquiring lock "90476778-373e-430f-ad56-b9e7e4d0540b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.691617] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d04e7ed3-b17c-44cd-8d64-91eab9b77bad tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] Lock "90476778-373e-430f-ad56-b9e7e4d0540b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.754147] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.757851] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.758051] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 983.758152] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 983.782163] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.782341] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.782476] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.782601] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building.
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.782720] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.782838] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.782954] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.783081] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.783199] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.783322] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 983.783441] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 983.839023] env[61356]: DEBUG oslo_concurrency.lockutils [None req-cab94cc8-f2f9-45fa-a28c-bb2bddde79e0 tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] Acquiring lock "c4aa0d72-8ad1-4a1f-9a5f-279baca944cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.839285] env[61356]: DEBUG oslo_concurrency.lockutils [None req-cab94cc8-f2f9-45fa-a28c-bb2bddde79e0 tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] Lock "c4aa0d72-8ad1-4a1f-9a5f-279baca944cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.385473] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f8fe403b-adac-4181-8970-589c2c40f127 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "9f0a73d0-c97d-4fa6-b00f-50f14763ad9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.385473] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f8fe403b-adac-4181-8970-589c2c40f127 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9f0a73d0-c97d-4fa6-b00f-50f14763ad9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.943588] env[61356]: WARNING oslo_vmware.rw_handles [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 989.943588] env[61356]: ERROR oslo_vmware.rw_handles [ 989.944407] env[61356]: DEBUG nova.virt.vmwareapi.images [None
req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 989.947955] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 989.948394] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Copying Virtual Disk [datastore2] vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/0ac644ae-fb99-4d85-a167-f127923305eb/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 989.948709] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f720a088-1ac1-430d-b1ce-d383a1d85d48 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.958532] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Waiting for the task: (returnval){ [ 989.958532] env[61356]: value = "task-4302554" [ 989.958532] env[61356]: _type = "Task" [ 989.958532] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.966734] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Task: {'id': task-4302554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.467197] env[61356]: DEBUG oslo_vmware.exceptions [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Fault InvalidArgument not matched. 
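
The CopyVirtualDisk_Task above is polled until it finishes, and its failure surfaces a moment later as a VimFaultException. A rough stdlib re-creation of that poll-and-raise pattern (the interval, state names, and exception class are assumptions, not oslo.vmware's actual implementation):

    import time

    class VimFaultException(Exception):
        def __init__(self, msg, faults):
            super().__init__(msg)
            self.faults = faults

    def wait_for_task(poll, interval=0.5, timeout=60.0):
        # Poll the task state on a fixed interval; translate a
        # task-level error into a local exception, as _poll_task does.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise VimFaultException(info["message"], info["faults"])
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    states = iter([
        {"state": "running", "progress": 0},
        {"state": "error", "faults": ["InvalidArgument"],
         "message": "A specified parameter was not correct: fileType"},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except VimFaultException as exc:
        print(exc, exc.faults)
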
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 990.467465] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.468030] env[61356]: ERROR nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 990.468030] env[61356]: Faults: ['InvalidArgument'] [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Traceback (most recent call last): [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] yield resources [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self.driver.spawn(context, instance, image_meta, [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self._fetch_image_if_missing(context, vi) [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] image_cache(vi, tmp_image_ds_loc) [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] vm_util.copy_virtual_disk( [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] session._wait_for_task(vmdk_copy_task) [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] return self.wait_for_task(task_ref) [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] return evt.wait() [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] result = hub.switch() [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] return self.greenlet.switch() [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self.f(*self.args, **self.kw) [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] raise exceptions.translate_fault(task_info.error) [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Faults: ['InvalidArgument'] [ 990.468030] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] [ 990.469364] env[61356]: INFO nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Terminating instance [ 990.469906] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.470209] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.470826] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: 
eed82b3f-6590-4677-a4a2-680d769400fd] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 990.471027] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 990.471407] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2630223a-1ac6-429c-99a1-af3d0a89f12d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.473811] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30247d4c-6e93-42f2-983c-c02517cd796a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.480946] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 990.482112] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2db6b35d-efdc-4914-ae89-3f05e779bb43 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.483943] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.484128] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 990.484799] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99a3fcbe-7635-44f3-ab77-2eb66b0334b5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.490315] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Waiting for the task: (returnval){ [ 990.490315] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f18200-8d9b-89e1-5201-9c9bca0f60dc" [ 990.490315] env[61356]: _type = "Task" [ 990.490315] env[61356]: } to complete. 
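
Interleaved with the teardown, a second request prepares the image cache location: MakeDirectory is invoked and the folder is then reported created. A tiny sketch of that create-if-missing step, using a local filesystem as a stand-in for the datastore API:

    from pathlib import Path

    def create_folder_if_missing(root: Path, name: str) -> Path:
        # mkdir(exist_ok=True) makes the call idempotent, matching the
        # "Folder ... created" / already-exists handling in the records.
        target = root / name
        target.mkdir(parents=True, exist_ok=True)
        return target

    cache = create_folder_if_missing(Path("/tmp"), "devstack-image-cache_base")
    print("created or already present:", cache)
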
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.502553] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f18200-8d9b-89e1-5201-9c9bca0f60dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.549949] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 990.550221] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 990.550406] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Deleting the datastore file [datastore2] eed82b3f-6590-4677-a4a2-680d769400fd {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.550678] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e6bb5d1-a29b-4791-a0d2-3ea182346e3f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.557221] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Waiting for the task: (returnval){ [ 990.557221] env[61356]: value = "task-4302556" [ 990.557221] env[61356]: _type = "Task" [ 990.557221] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.566218] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Task: {'id': task-4302556, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.006066] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 991.006978] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Creating directory with path [datastore2] vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.007734] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f959f5ab-d3a5-459f-b6e3-8beee93089af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.019484] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Created directory with path [datastore2] vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.020152] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Fetch image to [datastore2] vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 991.020276] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 991.021051] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfc7491-9920-4737-9136-c09f90a7e905 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.029409] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd6c326-3067-47af-8ce0-d18d378434c8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.041332] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b62793d-2a85-4bc6-8105-da26fdc9f81a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.083401] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66783216-a3db-4185-b2cd-2cf61d713cff {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.091185] env[61356]: DEBUG oslo_vmware.api [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Task: {'id': task-4302556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073393} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.092544] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.092722] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 991.092891] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.093069] env[61356]: INFO nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Took 0.62 seconds to destroy the instance on the hypervisor. 
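
The DeleteDatastoreFile_Task completes with a recorded duration, and the manager then reports the total wall-clock time to destroy the instance. A small sketch of that elapsed-time reporting, with the teardown body replaced by a sleep:

    import time
    from contextlib import contextmanager

    @contextmanager
    def timed(message):
        start = time.monotonic()
        try:
            yield
        finally:
            # Mirrors "Took 0.62 seconds to destroy the instance ..."
            print(f"Took {time.monotonic() - start:.2f} seconds to {message}")

    with timed("destroy the instance on the hypervisor"):
        time.sleep(0.05)  # stand-in for unregister + datastore cleanup
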
[ 991.095206] env[61356]: DEBUG nova.compute.claims [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 991.095377] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.095566] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.098121] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6b75c40c-d80d-43f8-9760-e9e28ec11c20 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.121970] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 991.193987] env[61356]: DEBUG oslo_vmware.rw_handles [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 991.275127] env[61356]: DEBUG oslo_vmware.rw_handles [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 991.275230] env[61356]: DEBUG oslo_vmware.rw_handles [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
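
The write handle above opens a single HTTP connection to the datastore URL, streams the 21318656-byte image through it, and closes the handle once the iterator is exhausted; the RemoteDisconnected traceback earlier in this excerpt came from exactly this close-and-read-response step on another transfer. A skeletal stdlib version of the pattern (illustrative only, never invoked here, and not oslo.vmware's rw_handles):

    import http.client
    from urllib.parse import urlsplit

    def write_image(url, data_iter, size):
        parts = urlsplit(url)
        path = parts.path + ("?" + parts.query if parts.query else "")
        conn = http.client.HTTPSConnection(parts.hostname, parts.port or 443)
        conn.putrequest("PUT", path)
        conn.putheader("Content-Length", str(size))
        conn.endheaders()
        sent = 0
        for chunk in data_iter:  # "Completed reading data from the image iterator."
            conn.send(chunk)
            sent += len(chunk)
        # Closing the handle reads the server's response; a server that
        # hangs up at this point raises http.client.RemoteDisconnected,
        # as seen in the warning logged earlier.
        response = conn.getresponse()
        conn.close()
        return sent, response.status
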
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 991.395804] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.396222] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.745351] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6676351-3a78-4acd-922f-395a62ae9278 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.755130] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecf42c4-eee5-4b35-a0da-c8424c19654c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.786895] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa84617-5992-4c3c-8e53-71b32aaa8738 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.794260] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8a2f4c-1ffe-4af2-a795-aaee9d0c2832 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.807487] env[61356]: DEBUG nova.compute.provider_tree [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.817550] env[61356]: DEBUG nova.scheduler.client.report [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.844258] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.748s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.845191] env[61356]: ERROR nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.845191] env[61356]: Faults: ['InvalidArgument'] [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Traceback (most recent call last): [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self.driver.spawn(context, instance, image_meta, [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self._fetch_image_if_missing(context, vi) [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] image_cache(vi, tmp_image_ds_loc) [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] vm_util.copy_virtual_disk( [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] session._wait_for_task(vmdk_copy_task) [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] return self.wait_for_task(task_ref) [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] return evt.wait() [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", 
line 124, in wait [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] result = hub.switch() [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] return self.greenlet.switch() [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] self.f(*self.args, **self.kw) [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] raise exceptions.translate_fault(task_info.error) [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Faults: ['InvalidArgument'] [ 991.845191] env[61356]: ERROR nova.compute.manager [instance: eed82b3f-6590-4677-a4a2-680d769400fd] [ 991.845992] env[61356]: DEBUG nova.compute.utils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 991.849283] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Build of instance eed82b3f-6590-4677-a4a2-680d769400fd was re-scheduled: A specified parameter was not correct: fileType [ 991.849283] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 991.849374] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 991.853018] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
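
The cleanup path notes that the VMware driver offers no unplug_vifs hook, so the manager cannot tell whether VIFs need unplugging. A compact sketch of that capability probe (Nova itself catches NotImplementedError from the driver; getattr is used here only to keep the sketch short):

    class StubVMwareDriver:
        pass  # deliberately provides no unplug_vifs

    def maybe_unplug_vifs(driver, instance_uuid):
        fn = getattr(driver, "unplug_vifs", None)
        if fn is None:
            print("Virt driver does not provide unplug_vifs method, so it "
                  "is not possible to determine if VIFs should be unplugged.")
            return
        fn(instance_uuid)

    maybe_unplug_vifs(StubVMwareDriver(), "eed82b3f-6590-4677-a4a2-680d769400fd")
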
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 991.853018] env[61356]: DEBUG nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 991.853018] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.923811] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.640194] env[61356]: DEBUG nova.network.neutron [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.663209] env[61356]: INFO nova.compute.manager [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Took 0.81 seconds to deallocate network for instance. 
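
After deallocation, the instance's network info cache is rewritten with an empty list. The same idea with a plain dict standing in for Nova's database-backed cache:

    cache = {"eed82b3f-6590-4677-a4a2-680d769400fd": [{"id": "old-vif"}]}

    def update_instance_cache_with_nw_info(cache, instance_uuid, nw_info):
        # Replace whatever was cached with the freshly computed view;
        # after deallocate_for_instance() that view is simply [].
        cache[instance_uuid] = nw_info
        print(f"Updating instance_info_cache with network_info: {nw_info}")

    update_instance_cache_with_nw_info(
        cache, "eed82b3f-6590-4677-a4a2-680d769400fd", [])
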
[ 992.781383] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b8b9b7a7-de83-4ba1-a64b-c7a4cdf05042 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "ce5f2517-fbf8-4ecc-b6f5-24c56238786b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.781437] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b8b9b7a7-de83-4ba1-a64b-c7a4cdf05042 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "ce5f2517-fbf8-4ecc-b6f5-24c56238786b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.819024] env[61356]: INFO nova.scheduler.client.report [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Deleted allocations for instance eed82b3f-6590-4677-a4a2-680d769400fd [ 992.859609] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f3a6b4fb-f7b7-4fa6-966e-ff3ec3e0aafc tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "eed82b3f-6590-4677-a4a2-680d769400fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 429.213s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.862477] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "eed82b3f-6590-4677-a4a2-680d769400fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 228.155s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.863214] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Acquiring lock "eed82b3f-6590-4677-a4a2-680d769400fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.863294] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "eed82b3f-6590-4677-a4a2-680d769400fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.865056] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "eed82b3f-6590-4677-a4a2-680d769400fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.866105] env[61356]: INFO nova.compute.manager [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Terminating instance [ 992.869516] env[61356]: DEBUG nova.compute.manager [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 992.870194] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 992.870729] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4c63fe2-2a1d-4674-9a0a-abbdbd5c5422 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.884225] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 992.890415] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99f28de-4f01-4589-a8f3-afa4c7b65056 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.927145] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eed82b3f-6590-4677-a4a2-680d769400fd could not be found. [ 992.927440] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.927664] env[61356]: INFO nova.compute.manager [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Took 0.06 seconds to destroy the instance on the hypervisor. [ 992.927880] env[61356]: DEBUG oslo.service.loopingcall [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
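
Network deallocation during terminate is wrapped in a retrying looping call (_deallocate_network_with_retries) that the service then waits on. A minimal retry wrapper in the same spirit (the attempt count, interval, and failing callee are invented for the demo):

    import time

    def call_with_retries(fn, attempts=3, interval=0.01):
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except Exception as exc:
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying")
                time.sleep(interval)

    calls = {"n": 0}

    def flaky_deallocate():
        calls["n"] += 1
        if calls["n"] < 2:
            raise ConnectionError("neutron unavailable")
        return "deallocated"

    print(call_with_retries(flaky_deallocate))
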
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.928135] env[61356]: DEBUG nova.compute.manager [-] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 992.928230] env[61356]: DEBUG nova.network.neutron [-] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 992.966134] env[61356]: DEBUG nova.network.neutron [-] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.979060] env[61356]: INFO nova.compute.manager [-] [instance: eed82b3f-6590-4677-a4a2-680d769400fd] Took 0.05 seconds to deallocate network for instance. [ 992.996063] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.996063] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.997658] env[61356]: INFO nova.compute.claims [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.118654] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07123851-344e-42da-95f6-9fff68c76d66 tempest-TenantUsagesTestJSON-2123171115 tempest-TenantUsagesTestJSON-2123171115-project-member] Lock "eed82b3f-6590-4677-a4a2-680d769400fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.256s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.556141] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1829b193-e6cb-46c4-8862-6d5bedfdbb42 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.563611] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df2fdbe-0975-4d2d-b5cc-74872cd919d3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.594634] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dae1cb1-e0b1-47de-a809-24ff54d131c2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.602370] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ea1c94-e5e1-4633-8cf4-a82118c6f7a0 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.616766] env[61356]: DEBUG nova.compute.provider_tree [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.625625] env[61356]: DEBUG nova.scheduler.client.report [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.642848] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.647s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.643360] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 993.680216] env[61356]: DEBUG nova.compute.utils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 993.681531] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 993.681733] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 993.693996] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Start building block device mappings for instance. 
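
Both resource-tracker passes above conclude that the provider's inventory is unchanged, so nothing is written back to Placement. The decision reduces to a deep comparison against the cached inventory dict; a sketch using the numbers from the records, with the push itself reduced to a print:

    import copy

    provider = "cfb833eb-84f8-480d-866a-1ad2fb871484"
    cached = {
        provider: {
            "VCPU": {"total": 48, "reserved": 0, "min_unit": 1,
                     "max_unit": 16, "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1,
                          "allocation_ratio": 1.0},
            "DISK_GB": {"total": 210, "reserved": 0, "min_unit": 1,
                        "max_unit": 107, "step_size": 1,
                        "allocation_ratio": 1.0},
        }
    }

    def set_inventory_for_provider(provider, new_inventory, cache):
        # Nested dicts compare by value, so == is a deep equality check.
        if cache.get(provider) == new_inventory:
            print(f"Inventory has not changed for provider {provider}")
            return False
        cache[provider] = new_inventory
        print(f"Updating inventory for provider {provider}")
        return True

    set_inventory_for_provider(provider, copy.deepcopy(cached[provider]), cached)
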
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 993.746474] env[61356]: DEBUG nova.policy [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39a6d62c94e84721a98fca5c0b2412c0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e90c65dabdd44f6f9c6e524fbf01f380', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 993.766814] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 993.807176] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 993.807440] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 993.807619] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 993.807809] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 993.808009] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 993.808213] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 
tempest-ImagesTestJSON-1579121251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 993.808429] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 993.808589] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 993.809311] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 993.809311] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 993.809311] env[61356]: DEBUG nova.virt.hardware [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.809994] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d2abc2-e5b2-44c8-a78d-efc9aa5fd0fa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.817941] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ab0e5f-d1db-43a2-a808-93dfa09c9b81 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.128190] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Successfully created port: 0c0b3b24-7ae2-4075-a565-5dc32cadacff {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 994.824763] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Successfully updated port: 0c0b3b24-7ae2-4075-a565-5dc32cadacff {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 994.851402] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "refresh_cache-1353d492-e9f0-4495-83e1-6832bb03d5db" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.851580] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired lock "refresh_cache-1353d492-e9f0-4495-83e1-6832bb03d5db" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.851734] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 994.906700] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.959126] env[61356]: DEBUG nova.compute.manager [req-90768a3a-b9f7-4202-bb5d-000502deacf1 req-a6a5311e-6ff6-4455-8fe8-f8a2bd838426 service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Received event network-vif-plugged-0c0b3b24-7ae2-4075-a565-5dc32cadacff {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 994.959391] env[61356]: DEBUG oslo_concurrency.lockutils [req-90768a3a-b9f7-4202-bb5d-000502deacf1 req-a6a5311e-6ff6-4455-8fe8-f8a2bd838426 service nova] Acquiring lock "1353d492-e9f0-4495-83e1-6832bb03d5db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.964245] env[61356]: DEBUG oslo_concurrency.lockutils [req-90768a3a-b9f7-4202-bb5d-000502deacf1 req-a6a5311e-6ff6-4455-8fe8-f8a2bd838426 service nova] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.964245] env[61356]: DEBUG oslo_concurrency.lockutils [req-90768a3a-b9f7-4202-bb5d-000502deacf1 req-a6a5311e-6ff6-4455-8fe8-f8a2bd838426 service nova] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.964245] env[61356]: DEBUG nova.compute.manager [req-90768a3a-b9f7-4202-bb5d-000502deacf1 req-a6a5311e-6ff6-4455-8fe8-f8a2bd838426 service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] No waiting events found dispatching network-vif-plugged-0c0b3b24-7ae2-4075-a565-5dc32cadacff {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 994.964245] env[61356]: WARNING nova.compute.manager [req-90768a3a-b9f7-4202-bb5d-000502deacf1 req-a6a5311e-6ff6-4455-8fe8-f8a2bd838426 service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Received unexpected event network-vif-plugged-0c0b3b24-7ae2-4075-a565-5dc32cadacff for instance with vm_state building and task_state spawning. 
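
The topology negotiation earlier in this trace (Flavor limits 0:0:0, limits sockets=65536/cores=65536/threads=65536, "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") reduces to enumerating (sockets, cores, threads) factorizations of the flavor's vCPU count under the per-axis caps. A minimal Python sketch of that enumeration, not Nova's actual implementation in nova/virt/hardware.py:

    # Enumerate every (sockets, cores, threads) triple whose product equals
    # the vCPU count and which fits the per-axis limits. For the 1-vCPU
    # m1.nano flavor with 65536:65536:65536 limits this yields [(1, 1, 1)],
    # matching the "Got 1 possible topologies" line above.
    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        topos = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(vcpus // s, max_cores) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    topos.append((s, c, t))
        return topos

    print(possible_topologies(1, 65536, 65536, 65536))  # -> [(1, 1, 1)]
    print(possible_topologies(4, 65536, 65536, 2))      # multi-vCPU example
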
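The "No waiting events found dispatching network-vif-plugged-…" / "Received unexpected event …" pair above is the event-waiter pattern: the spawning thread registers a waiter per expected event under the per-instance "-events" lock, and externally delivered Neutron events pop it; an event that arrives with no registered waiter is only warned about. A simplified stdlib sketch under those assumptions (illustrative class and method names, not Nova's real InstanceEvents):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}        # instance_uuid -> {event_name: Event}
            self._lock = threading.Lock()   # mirrors the "<uuid>-events" lock

        def prepare_for_event(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._lock:
                self._events.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    events = InstanceEvents()
    waiter = events.pop_event("1353d492-e9f0-4495-83e1-6832bb03d5db",
                              "network-vif-plugged-0c0b3b24-7ae2-4075-a565-5dc32cadacff")
    if waiter is None:
        print("WARNING: received unexpected event")  # nobody was waiting yet
    else:
        waiter.set()                                 # wake the spawning thread
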
[ 995.119752] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Updating instance_info_cache with network_info: [{"id": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "address": "fa:16:3e:65:fc:86", "network": {"id": "faeb66a5-72f9-4276-8490-83a2eef57844", "bridge": "br-int", "label": "tempest-ImagesTestJSON-444094988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90c65dabdd44f6f9c6e524fbf01f380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c0b3b24-7a", "ovs_interfaceid": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.131593] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Releasing lock "refresh_cache-1353d492-e9f0-4495-83e1-6832bb03d5db" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.131878] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance network_info: |[{"id": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "address": "fa:16:3e:65:fc:86", "network": {"id": "faeb66a5-72f9-4276-8490-83a2eef57844", "bridge": "br-int", "label": "tempest-ImagesTestJSON-444094988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90c65dabdd44f6f9c6e524fbf01f380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c0b3b24-7a", "ovs_interfaceid": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 995.132308] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 
tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:fc:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c0b3b24-7ae2-4075-a565-5dc32cadacff', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.140443] env[61356]: DEBUG oslo.service.loopingcall [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.140864] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 995.141102] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-472901ca-843e-43f7-9e6d-7ed0a8478672 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.161757] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.161757] env[61356]: value = "task-4302557" [ 995.161757] env[61356]: _type = "Task" [ 995.161757] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.169611] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302557, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.672048] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302557, 'name': CreateVM_Task, 'duration_secs': 0.327444} completed successfully. 
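
The '(returnval){ value = "task-4302557" _type = "Task" }' block plus the "progress is 0%" and "completed successfully" lines above are the standard poll-until-done loop around a vCenter task. A hedged sketch of that loop; the get_task_info callable and its dict shape are stand-ins for the real oslo.vmware task-info object:

    import itertools
    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        """Poll get_task_info() until the task reports success or error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError(str(info.get("error")))
            # cf. the "Task: {...} progress is 0%." DEBUG lines above
            print(f"Task {info['id']} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError("task did not complete before the deadline")

    # Demo with a fake task that succeeds on the third poll, standing in
    # for the CreateVM_Task above (duration_secs: 0.327444 in the log).
    _states = itertools.chain(
        [{"id": "task-4302557", "state": "running", "progress": 0}] * 2,
        itertools.repeat({"id": "task-4302557", "state": "success"}))
    print(wait_for_task(lambda: next(_states), interval=0.01))
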
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.672220] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 995.672908] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.673077] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.673423] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.673688] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de87c67-d020-4774-9860-9153ed080378 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.679073] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 995.679073] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]523e9e3d-8a67-88b9-3f16-adfb0d04ddb0" [ 995.679073] env[61356]: _type = "Task" [ 995.679073] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.687215] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]523e9e3d-8a67-88b9-3f16-adfb0d04ddb0, 'name': SearchDatastore_Task} progress is 0%. 
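
The lock on "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" plus the SearchDatastore_Task above implement get-or-fetch for the image cache: writers for one image serialize on a lock named after the cache path, check whether the cached VMDK already exists, and fetch only if missing. An illustrative stdlib sketch; the paths and the fetch callback are stand-ins, not Nova's real helpers:

    import threading
    from pathlib import Path

    _locks: dict[str, threading.Lock] = {}
    _locks_guard = threading.Lock()

    def _named_lock(name: str) -> threading.Lock:
        # One lock per cache path, like the datastore-path locks above.
        with _locks_guard:
            return _locks.setdefault(name, threading.Lock())

    def get_or_fetch(cache_dir: Path, image_id: str, fetch) -> Path:
        target = cache_dir / image_id / f"{image_id}.vmdk"
        with _named_lock(str(target)):
            if not target.exists():          # cf. SearchDatastore_Task
                target.parent.mkdir(parents=True, exist_ok=True)
                fetch(target)                # download + convert happen here
        return target

    # Example: populate the cache once; later callers find the file and skip.
    path = get_or_fetch(Path("/tmp/devstack-image-cache_base"),
                        "88d3b9c3-6614-40fd-b1c9-50fb806e539a",
                        lambda p: p.write_bytes(b"vmdk bytes here"))
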
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.189688] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.189933] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.190612] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.094685] env[61356]: DEBUG nova.compute.manager [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Received event network-changed-0c0b3b24-7ae2-4075-a565-5dc32cadacff {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 997.094850] env[61356]: DEBUG nova.compute.manager [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Refreshing instance network info cache due to event network-changed-0c0b3b24-7ae2-4075-a565-5dc32cadacff. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 997.095082] env[61356]: DEBUG oslo_concurrency.lockutils [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] Acquiring lock "refresh_cache-1353d492-e9f0-4495-83e1-6832bb03d5db" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.095228] env[61356]: DEBUG oslo_concurrency.lockutils [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] Acquired lock "refresh_cache-1353d492-e9f0-4495-83e1-6832bb03d5db" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.095389] env[61356]: DEBUG nova.network.neutron [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Refreshing network info cache for port 0c0b3b24-7ae2-4075-a565-5dc32cadacff {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 997.244709] env[61356]: DEBUG oslo_concurrency.lockutils [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "1353d492-e9f0-4495-83e1-6832bb03d5db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.501997] env[61356]: DEBUG nova.network.neutron [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Updated VIF entry in instance network info cache for port 0c0b3b24-7ae2-4075-a565-5dc32cadacff. 
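
"Updated VIF entry in instance network info cache for port 0c0b3b24-7ae2-4075-a565-5dc32cadacff" above means one element of the cached network_info list was swapped for a freshly built one, keyed by port id, leaving the rest of the cache untouched. A small sketch of that update; the list-of-dicts shape is taken from the instance_info_cache dumps above:

    import json

    def update_vif_entry(network_info: list, refreshed: dict) -> list:
        out, replaced = [], False
        for vif in network_info:
            if vif["id"] == refreshed["id"]:
                out.append(refreshed)        # replace the matching VIF
                replaced = True
            else:
                out.append(vif)              # keep everything else as-is
        if not replaced:
            out.append(refreshed)            # new port: append
        return out

    cache = [{"id": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "active": False}]
    cache = update_vif_entry(cache, {"id": "0c0b3b24-7ae2-4075-a565-5dc32cadacff",
                                     "active": True})
    print(json.dumps(cache))
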
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 997.502366] env[61356]: DEBUG nova.network.neutron [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Updating instance_info_cache with network_info: [{"id": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "address": "fa:16:3e:65:fc:86", "network": {"id": "faeb66a5-72f9-4276-8490-83a2eef57844", "bridge": "br-int", "label": "tempest-ImagesTestJSON-444094988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90c65dabdd44f6f9c6e524fbf01f380", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c0b3b24-7a", "ovs_interfaceid": "0c0b3b24-7ae2-4075-a565-5dc32cadacff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.511983] env[61356]: DEBUG oslo_concurrency.lockutils [req-397b645a-2b77-4ef4-9b68-a1e9c60a3d53 req-d7c2e0f5-ec2d-4754-9d53-b29a8b1ead2a service nova] Releasing lock "refresh_cache-1353d492-e9f0-4495-83e1-6832bb03d5db" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.330569] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d8d6870-62c5-41e2-9145-b8ed903f3eed tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] Acquiring lock "508170e0-afe4-4ff5-94cc-d7b093cfd7d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.330890] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d8d6870-62c5-41e2-9145-b8ed903f3eed tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] Lock "508170e0-afe4-4ff5-94cc-d7b093cfd7d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.017906] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3002e83c-56a8-4f45-bc8a-b932af2b571f tempest-InstanceActionsNegativeTestJSON-1868473628 tempest-InstanceActionsNegativeTestJSON-1868473628-project-member] Acquiring lock "bd723957-6a15-4d23-88a4-c708778bee39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.018272] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3002e83c-56a8-4f45-bc8a-b932af2b571f tempest-InstanceActionsNegativeTestJSON-1868473628 
tempest-InstanceActionsNegativeTestJSON-1868473628-project-member] Lock "bd723957-6a15-4d23-88a4-c708778bee39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.018471] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8dab85de-0ef6-453e-a081-8722062b2bf8 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Acquiring lock "c63173e3-52d8-4412-a225-37b67187126a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.018658] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8dab85de-0ef6-453e-a081-8722062b2bf8 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Lock "c63173e3-52d8-4412-a225-37b67187126a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.754395] env[61356]: DEBUG oslo_concurrency.lockutils [None req-54870c06-6207-4262-95ec-1b2d0fd2557f tempest-ServerShowV254Test-864364288 tempest-ServerShowV254Test-864364288-project-member] Acquiring lock "03f22cd3-fea8-43be-a4db-f65521c832a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.754665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-54870c06-6207-4262-95ec-1b2d0fd2557f tempest-ServerShowV254Test-864364288 tempest-ServerShowV254Test-864364288-project-member] Lock "03f22cd3-fea8-43be-a4db-f65521c832a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.758716] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1038.392539] env[61356]: WARNING oslo_vmware.rw_handles [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1038.392539] env[61356]: ERROR 
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1038.392539] env[61356]: ERROR oslo_vmware.rw_handles [ 1038.392941] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1038.394871] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1038.395173] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Copying Virtual Disk [datastore2] vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/e6cae832-ab58-4869-93d3-d8bd5aa77659/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1038.395494] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adbbd94e-cd42-4455-a3b4-c189902ae3b8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.404174] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Waiting for the task: (returnval){ [ 1038.404174] env[61356]: value = "task-4302558" [ 1038.404174] env[61356]: _type = "Task" [ 1038.404174] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.412756] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Task: {'id': task-4302558, 'name': CopyVirtualDisk_Task} progress is 0%. 
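
The RemoteDisconnected traceback above is logged only at WARNING and the download is still reported as complete: the ESX host may close the upload connection without sending a final HTTP response, so getresponse() inside close() raises. A sketch of tolerating that, assuming a plain http.client connection (the real handle lives in oslo_vmware.rw_handles):

    import http.client
    import logging

    LOG = logging.getLogger(__name__)

    def close_write_handle(conn: http.client.HTTPSConnection) -> None:
        try:
            conn.getresponse()       # drain the server's final response
        except http.client.RemoteDisconnected:
            # Remote end closed the connection without a response; the bytes
            # were already written, so warn and carry on, as the log does.
            LOG.warning("Error occurred while reading the HTTP response.",
                        exc_info=True)
        finally:
            conn.close()
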
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.470022] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "3bc9faac-35bb-4303-937f-3682771e9258" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.470022] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "3bc9faac-35bb-4303-937f-3682771e9258" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.915110] env[61356]: DEBUG oslo_vmware.exceptions [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1038.915495] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.916187] env[61356]: ERROR nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1038.916187] env[61356]: Faults: ['InvalidArgument'] [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Traceback (most recent call last): [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] yield resources [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self.driver.spawn(context, instance, image_meta, [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self._fetch_image_if_missing(context, vi) [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] image_cache(vi, tmp_image_ds_loc) [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] vm_util.copy_virtual_disk( [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] session._wait_for_task(vmdk_copy_task) [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] return self.wait_for_task(task_ref) [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] return evt.wait() [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] result = hub.switch() [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] return self.greenlet.switch() [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self.f(*self.args, **self.kw) [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] raise exceptions.translate_fault(task_info.error) [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1038.916187] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Faults: ['InvalidArgument'] [ 1038.916187] 
env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] [ 1038.917491] env[61356]: INFO nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Terminating instance [ 1038.918637] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.918687] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.919292] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d53f75a2-83ac-4d8b-8627-b5838fe38b65 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.925425] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1038.925561] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1038.926385] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808c5a3b-01b0-48b0-bd5c-3a4250c6eba2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.933844] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1038.934189] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a54e7a6-a204-468c-b0d8-a957dcec9c94 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.936817] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.936986] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef 
tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1038.937990] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e9598f4-2575-4bc8-9a6d-2acbd700255e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.942723] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for the task: (returnval){ [ 1038.942723] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527ba814-fbab-70a4-ccef-dfd83eea2130" [ 1038.942723] env[61356]: _type = "Task" [ 1038.942723] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.957207] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1038.957474] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Creating directory with path [datastore2] vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.957697] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa34a91f-e699-412d-9426-e2c01acadd58 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.977818] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Created directory with path [datastore2] vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.978053] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Fetch image to [datastore2] vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1038.978237] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1038.979125] env[61356]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d89e7cd-aaca-454b-aa92-e1625e42136d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.987695] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937508a4-5639-4401-bb3f-e6b0f46eb0ab {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.996729] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c772b6b1-4433-4c96-bb05-5116ddee6fb5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.031048] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48299359-216d-47cd-bd59-456ee81aee54 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.033615] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1039.033760] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1039.033936] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Deleting the datastore file [datastore2] 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.034622] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0eaf894-46df-4c34-b07b-9505d9d259f2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.038984] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f2c3484c-b95c-4908-88d6-0933826af250 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.041750] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Waiting for the task: (returnval){ [ 1039.041750] env[61356]: value = "task-4302560" [ 1039.041750] env[61356]: _type = "Task" [ 1039.041750] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.049857] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Task: {'id': task-4302560, 'name': DeleteDatastoreFile_Task} progress is 0%. 
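
"Fault InvalidArgument not matched" followed by VimFaultException("A specified parameter was not correct: fileType") in the traceback above shows the fault-translation step: the fault name carried in task_info.error is looked up in a registry of specific exception classes, and anything not matched falls back to a generic VimFaultException. An illustrative registry under those assumptions; the class names mirror oslo.vmware's, but this mapping is a sketch, not its code:

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class FileAlreadyExistsException(VimFaultException):
        pass

    _FAULT_CLASSES = {"FileAlreadyExists": FileAlreadyExistsException}

    def translate_fault(fault_name: str, message: str) -> VimFaultException:
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:              # "Fault InvalidArgument not matched."
            cls = VimFaultException  # generic fallback carries the fault list
        return cls([fault_name], message)

    exc = translate_fault("InvalidArgument",
                          "A specified parameter was not correct: fileType")
    print(type(exc).__name__, exc.fault_list)
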
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.072860] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1039.136330] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1039.198019] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1039.198019] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1039.552739] env[61356]: DEBUG oslo_vmware.api [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Task: {'id': task-4302560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074217} completed successfully. 
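
The write handle above ("Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1…/folder/…?dcPath=ha-datacenter&dsName=datastore2") streams the image iterator into the datastore over a single PUT with an exact Content-Length. A minimal sketch under those assumptions; the cookie header and iterator are illustrative, and real authentication uses a vCenter-issued service ticket:

    import http.client
    import urllib.parse

    def write_image(url: str, data_iter, size: int, cookie: str) -> None:
        parts = urllib.parse.urlsplit(url)
        conn = http.client.HTTPSConnection(parts.hostname, parts.port or 443)
        path = parts.path + ("?" + parts.query if parts.query else "")
        conn.putrequest("PUT", path)
        conn.putheader("Content-Length", str(size))  # 21318656 in the log
        conn.putheader("Cookie", cookie)             # stand-in for the ticket
        conn.endheaders()
        for chunk in data_iter:      # "reading data from the image iterator"
            conn.send(chunk)
        conn.close()                 # cf. "Closing write handle for ..."
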
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.553010] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.553205] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1039.553379] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1039.553558] env[61356]: INFO nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1039.555726] env[61356]: DEBUG nova.compute.claims [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1039.555898] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.556121] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.758472] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.759401] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.759614] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1039.771695] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.978372] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0033e5-a661-4800-93f2-3ce6c8bcdaa2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.986246] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c646beb7-0238-454d-a424-a4cd7c33f25b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.018556] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaea28e3-2b8f-4b26-9fb0-956e10beab5c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.026036] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac6b3a9-d890-4c62-a586-32c1163268ac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.039212] env[61356]: DEBUG nova.compute.provider_tree [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.047687] env[61356]: DEBUG nova.scheduler.client.report [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1040.063392] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.507s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.063655] env[61356]: ERROR nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 
1040.063655] env[61356]: Faults: ['InvalidArgument'] [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Traceback (most recent call last): [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self.driver.spawn(context, instance, image_meta, [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self._fetch_image_if_missing(context, vi) [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] image_cache(vi, tmp_image_ds_loc) [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] vm_util.copy_virtual_disk( [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] session._wait_for_task(vmdk_copy_task) [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] return self.wait_for_task(task_ref) [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] return evt.wait() [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] result = hub.switch() [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] return self.greenlet.switch() [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 
8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] self.f(*self.args, **self.kw) [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] raise exceptions.translate_fault(task_info.error) [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Faults: ['InvalidArgument'] [ 1040.063655] env[61356]: ERROR nova.compute.manager [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] [ 1040.065240] env[61356]: DEBUG nova.compute.utils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1040.066213] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.294s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.066213] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.066213] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1040.066528] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Build of instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab was re-scheduled: A specified parameter was not correct: fileType [ 1040.066528] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1040.066991] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1040.067177] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 
tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1040.067261] env[61356]: DEBUG nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1040.067453] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1040.069652] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b6c9a1-b644-46b9-a0a0-9e8f6a84e317 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.078178] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3eba43-cd7b-4488-ba50-f59a73b59f47 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.092818] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ba1380-befb-40f3-b931-ec4551b775b1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.099108] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0540dfd-8154-40cc-b67c-4792f3debc8b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.129687] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180695MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1040.129846] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.130057] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.229269] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.229440] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.229566] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.229686] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.229803] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.229917] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.230041] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.230157] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.230531] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.230531] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1040.245576] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.256905] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 798abe6a-1a6b-4844-b61a-fe258e6f1f04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.271158] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.284064] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.301450] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2e4a6bd2-64d8-4c48-81c6-b3c749e4c270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.315254] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2186aafa-5fd5-4b61-84f1-a694e20724dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.328309] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 90476778-373e-430f-ad56-b9e7e4d0540b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.339256] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c4aa0d72-8ad1-4a1f-9a5f-279baca944cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.353240] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f0a73d0-c97d-4fa6-b00f-50f14763ad9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.364834] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.377230] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ce5f2517-fbf8-4ecc-b6f5-24c56238786b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.388611] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 508170e0-afe4-4ff5-94cc-d7b093cfd7d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.401395] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bd723957-6a15-4d23-88a4-c708778bee39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.416395] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c63173e3-52d8-4412-a225-37b67187126a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.431506] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 03f22cd3-fea8-43be-a4db-f65521c832a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.442397] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.442636] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1040.442780] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=110GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1040.582219] env[61356]: DEBUG nova.network.neutron [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.599527] env[61356]: INFO nova.compute.manager [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Took 0.53 seconds to deallocate network for instance. 
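The audit above reduces to simple arithmetic over the per-instance claims it lists: nine instances are reported as actively managed, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and the placement inventory reported a little further down reserves 512 MB of host RAM. A minimal standalone sketch of that bookkeeping (figures taken from this log; the helper name is hypothetical, not Nova's actual code):

    def final_resource_view(claims, reserved_ram_mb=512):
        # Aggregate usage the way the tracker's "Final resource view" line
        # reports it: reserved host memory plus the per-instance claims.
        return {
            'used_ram_mb': reserved_ram_mb + sum(c['MEMORY_MB'] for c in claims),
            'used_disk_gb': sum(c['DISK_GB'] for c in claims),
            'used_vcpus': sum(c['VCPU'] for c in claims),
        }

    # Nine actively managed instances, each with the claim shown above.
    claims = [{'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}] * 9
    print(final_resource_view(claims))
    # {'used_ram_mb': 1664, 'used_disk_gb': 9, 'used_vcpus': 9} -- matching
    # used_ram=1664MB used_disk=9GB used_vcpus=9 in the final view above.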
[ 1040.727205] env[61356]: INFO nova.scheduler.client.report [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Deleted allocations for instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab [ 1040.750508] env[61356]: DEBUG oslo_concurrency.lockutils [None req-96d91604-4cf7-475b-9e11-e9b77db943b6 tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 469.897s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.751711] env[61356]: DEBUG oslo_concurrency.lockutils [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 270.358s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.751869] env[61356]: DEBUG oslo_concurrency.lockutils [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Acquiring lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.752075] env[61356]: DEBUG oslo_concurrency.lockutils [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.752248] env[61356]: DEBUG oslo_concurrency.lockutils [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.756116] env[61356]: INFO nova.compute.manager [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Terminating instance [ 1040.758260] env[61356]: DEBUG nova.compute.manager [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1040.758442] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.758760] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85cbc456-3214-4b4c-babf-e103cec886f7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.765029] env[61356]: DEBUG nova.compute.manager [None req-8dba2bfb-0a72-4f20-9a77-6531b60e2034 tempest-FloatingIPsAssociationNegativeTestJSON-792199732 tempest-FloatingIPsAssociationNegativeTestJSON-792199732-project-member] [instance: 7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.774536] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55404ea-39c8-4ded-96e0-e06c4a65f957 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.805500] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab could not be found. [ 1040.805731] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1040.805909] env[61356]: INFO nova.compute.manager [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1040.806169] env[61356]: DEBUG oslo.service.loopingcall [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.809087] env[61356]: DEBUG nova.compute.manager [None req-8dba2bfb-0a72-4f20-9a77-6531b60e2034 tempest-FloatingIPsAssociationNegativeTestJSON-792199732 tempest-FloatingIPsAssociationNegativeTestJSON-792199732-project-member] [instance: 7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.810335] env[61356]: DEBUG nova.compute.manager [-] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1040.810335] env[61356]: DEBUG nova.network.neutron [-] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1040.833240] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8dba2bfb-0a72-4f20-9a77-6531b60e2034 tempest-FloatingIPsAssociationNegativeTestJSON-792199732 tempest-FloatingIPsAssociationNegativeTestJSON-792199732-project-member] Lock "7cbe1763-abb0-45a1-9a9e-5cd7c5ebd367" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.768s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.838030] env[61356]: DEBUG nova.network.neutron [-] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.842699] env[61356]: DEBUG nova.compute.manager [None req-da479aa7-0997-4ce9-a751-472048829ba9 tempest-ServersTestManualDisk-1291211396 tempest-ServersTestManualDisk-1291211396-project-member] [instance: 42716a31-0ced-493f-b7f7-5576ef2babd2] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.849506] env[61356]: INFO nova.compute.manager [-] [instance: 8cfa7077-6dcb-49f3-b187-9ad9f7c418ab] Took 0.04 seconds to deallocate network for instance. [ 1040.882672] env[61356]: DEBUG nova.compute.manager [None req-da479aa7-0997-4ce9-a751-472048829ba9 tempest-ServersTestManualDisk-1291211396 tempest-ServersTestManualDisk-1291211396-project-member] [instance: 42716a31-0ced-493f-b7f7-5576ef2babd2] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.901472] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e13e17-1a2e-45c1-a278-19f54aaf8766 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.912439] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b502a3e3-1595-4f3f-8581-dd05b6f57196 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.919685] env[61356]: DEBUG oslo_concurrency.lockutils [None req-da479aa7-0997-4ce9-a751-472048829ba9 tempest-ServersTestManualDisk-1291211396 tempest-ServersTestManualDisk-1291211396-project-member] Lock "42716a31-0ced-493f-b7f7-5576ef2babd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.068s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.970123] env[61356]: DEBUG nova.compute.manager [None req-475f2f4d-6434-45d2-8b3e-e7ce89e155fb tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] [instance: 79969f15-053a-4567-b705-fbfca33b6ee8] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.973093] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4c11c6-412b-49d7-8d7d-fa67077eb001 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.984437] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac9ff12-81b2-4cb5-bd75-4f5706976931 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.001531] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.005543] env[61356]: DEBUG nova.compute.manager [None req-475f2f4d-6434-45d2-8b3e-e7ce89e155fb tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] [instance: 79969f15-053a-4567-b705-fbfca33b6ee8] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.014512] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.035756] env[61356]: DEBUG oslo_concurrency.lockutils [None req-475f2f4d-6434-45d2-8b3e-e7ce89e155fb tempest-SecurityGroupsTestJSON-940433817 tempest-SecurityGroupsTestJSON-940433817-project-member] Lock "79969f15-053a-4567-b705-fbfca33b6ee8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 233.583s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.038971] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1041.039201] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.909s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.042298] env[61356]: DEBUG oslo_concurrency.lockutils [None req-87babc7e-f19f-4f82-871e-cc1fbfed7eda tempest-ImagesOneServerNegativeTestJSON-344867930 tempest-ImagesOneServerNegativeTestJSON-344867930-project-member] Lock "8cfa7077-6dcb-49f3-b187-9ad9f7c418ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.291s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.049126] env[61356]: DEBUG nova.compute.manager [None req-e275a1b1-f4aa-4f07-8ec2-ab6f084ac02e tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] [instance: b5cebdbc-f815-4b93-ac2a-27f0121966a6] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.075246] env[61356]: DEBUG nova.compute.manager [None req-e275a1b1-f4aa-4f07-8ec2-ab6f084ac02e tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] [instance: b5cebdbc-f815-4b93-ac2a-27f0121966a6] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.117465] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e275a1b1-f4aa-4f07-8ec2-ab6f084ac02e tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Lock "b5cebdbc-f815-4b93-ac2a-27f0121966a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.722s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.129453] env[61356]: DEBUG nova.compute.manager [None req-71b143b2-2700-418e-bf81-def98c010a19 tempest-ServerMetadataNegativeTestJSON-1051451279 tempest-ServerMetadataNegativeTestJSON-1051451279-project-member] [instance: 4053d28b-837f-4e5c-bdbe-10f10cb3ce04] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.151963] env[61356]: DEBUG nova.compute.manager [None req-71b143b2-2700-418e-bf81-def98c010a19 tempest-ServerMetadataNegativeTestJSON-1051451279 tempest-ServerMetadataNegativeTestJSON-1051451279-project-member] [instance: 4053d28b-837f-4e5c-bdbe-10f10cb3ce04] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.172078] env[61356]: DEBUG oslo_concurrency.lockutils [None req-71b143b2-2700-418e-bf81-def98c010a19 tempest-ServerMetadataNegativeTestJSON-1051451279 tempest-ServerMetadataNegativeTestJSON-1051451279-project-member] Lock "4053d28b-837f-4e5c-bdbe-10f10cb3ce04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.002s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.181834] env[61356]: DEBUG nova.compute.manager [None req-7d7e349c-fb99-4739-b720-d82cc883c6e1 tempest-ServersNegativeTestMultiTenantJSON-1341881425 tempest-ServersNegativeTestMultiTenantJSON-1341881425-project-member] [instance: 4f8c5de0-e819-4702-b871-619e221eeb4c] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.209535] env[61356]: DEBUG nova.compute.manager [None req-7d7e349c-fb99-4739-b720-d82cc883c6e1 tempest-ServersNegativeTestMultiTenantJSON-1341881425 tempest-ServersNegativeTestMultiTenantJSON-1341881425-project-member] [instance: 4f8c5de0-e819-4702-b871-619e221eeb4c] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.231359] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d7e349c-fb99-4739-b720-d82cc883c6e1 tempest-ServersNegativeTestMultiTenantJSON-1341881425 tempest-ServersNegativeTestMultiTenantJSON-1341881425-project-member] Lock "4f8c5de0-e819-4702-b871-619e221eeb4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.646s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.239599] env[61356]: DEBUG nova.compute.manager [None req-c271f1af-2273-4b4b-88b6-f86c13eaf1d6 tempest-ServersV294TestFqdnHostnames-1674490250 tempest-ServersV294TestFqdnHostnames-1674490250-project-member] [instance: 612128fe-2d34-46e7-9496-f6c85dcf4ca4] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.276630] env[61356]: DEBUG nova.compute.manager [None req-c271f1af-2273-4b4b-88b6-f86c13eaf1d6 tempest-ServersV294TestFqdnHostnames-1674490250 tempest-ServersV294TestFqdnHostnames-1674490250-project-member] [instance: 612128fe-2d34-46e7-9496-f6c85dcf4ca4] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.301901] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c271f1af-2273-4b4b-88b6-f86c13eaf1d6 tempest-ServersV294TestFqdnHostnames-1674490250 tempest-ServersV294TestFqdnHostnames-1674490250-project-member] Lock "612128fe-2d34-46e7-9496-f6c85dcf4ca4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.066s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.312314] env[61356]: DEBUG nova.compute.manager [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: acc5ec07-0d2f-413c-ab18-07876f43d786] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.336515] env[61356]: DEBUG nova.compute.manager [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: acc5ec07-0d2f-413c-ab18-07876f43d786] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.359570] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "acc5ec07-0d2f-413c-ab18-07876f43d786" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.138s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.370053] env[61356]: DEBUG nova.compute.manager [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: a76f796d-63d2-41ad-b800-629b68c00694] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.397568] env[61356]: DEBUG nova.compute.manager [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] [instance: a76f796d-63d2-41ad-b800-629b68c00694] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.421172] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ca32e057-fc07-4626-aa16-9d30833f8b86 tempest-MultipleCreateTestJSON-617772438 tempest-MultipleCreateTestJSON-617772438-project-member] Lock "a76f796d-63d2-41ad-b800-629b68c00694" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.174s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.432487] env[61356]: DEBUG nova.compute.manager [None req-367571d2-573e-4b22-942c-29bd9fc18980 tempest-InstanceActionsTestJSON-1335029912 tempest-InstanceActionsTestJSON-1335029912-project-member] [instance: 7babcc84-5bde-464c-9ac8-c832cb3887f6] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.457709] env[61356]: DEBUG nova.compute.manager [None req-367571d2-573e-4b22-942c-29bd9fc18980 tempest-InstanceActionsTestJSON-1335029912 tempest-InstanceActionsTestJSON-1335029912-project-member] [instance: 7babcc84-5bde-464c-9ac8-c832cb3887f6] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1041.479706] env[61356]: DEBUG oslo_concurrency.lockutils [None req-367571d2-573e-4b22-942c-29bd9fc18980 tempest-InstanceActionsTestJSON-1335029912 tempest-InstanceActionsTestJSON-1335029912-project-member] Lock "7babcc84-5bde-464c-9ac8-c832cb3887f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 200.300s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.489902] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1041.548389] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.548971] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.550333] env[61356]: INFO nova.compute.claims [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.970242] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7021e213-8504-4ad4-a4dd-6db76e631c49 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.978521] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b28bde-4a1e-4ff6-a1c7-e82374b87932 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.007976] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e05f69-d6db-4c9d-a78b-acd7db1f4868 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.015564] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0268eaeb-0a3c-4315-b9b2-8522c8c6c993 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.029580] env[61356]: DEBUG nova.compute.provider_tree [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.040620] env[61356]: DEBUG nova.scheduler.client.report [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
1042.063585] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.063941] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1042.115430] env[61356]: DEBUG nova.compute.utils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1042.116967] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1042.116967] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1042.133472] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1042.235587] env[61356]: DEBUG nova.policy [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4b291efe18d4e2cb00fdb292df95605', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69e402fe444647c590c23f71f049dbfb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1042.250916] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1042.276863] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1042.277150] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1042.277432] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.277607] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1042.277768] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.277920] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1042.278276] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1042.278463] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1042.279106] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1042.279318] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1042.279503] env[61356]: DEBUG nova.virt.hardware [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1042.280392] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cc713e-5fe0-40d0-a6b1-6963a13c729f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.289120] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94071355-a9fd-44b8-9c40-d0a8b9f83ad0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.055153] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Successfully created port: 47b9f821-e6b3-438b-b212-b690f6ea7700 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.794500] env[61356]: DEBUG nova.compute.manager [req-5bbb43e2-23b3-4ab1-9889-9b08e9ab6521 req-0bc913ac-81ce-40ea-b435-abb4eb52a577 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Received event network-vif-plugged-47b9f821-e6b3-438b-b212-b690f6ea7700 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1043.794781] env[61356]: DEBUG oslo_concurrency.lockutils [req-5bbb43e2-23b3-4ab1-9889-9b08e9ab6521 req-0bc913ac-81ce-40ea-b435-abb4eb52a577 service nova] Acquiring lock "498180c6-cc07-4c5a-bf1e-784148909d6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.794993] env[61356]: DEBUG oslo_concurrency.lockutils [req-5bbb43e2-23b3-4ab1-9889-9b08e9ab6521 req-0bc913ac-81ce-40ea-b435-abb4eb52a577 service nova] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.795181] env[61356]: DEBUG oslo_concurrency.lockutils [req-5bbb43e2-23b3-4ab1-9889-9b08e9ab6521 req-0bc913ac-81ce-40ea-b435-abb4eb52a577 service nova] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 
0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.795357] env[61356]: DEBUG nova.compute.manager [req-5bbb43e2-23b3-4ab1-9889-9b08e9ab6521 req-0bc913ac-81ce-40ea-b435-abb4eb52a577 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] No waiting events found dispatching network-vif-plugged-47b9f821-e6b3-438b-b212-b690f6ea7700 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1043.795529] env[61356]: WARNING nova.compute.manager [req-5bbb43e2-23b3-4ab1-9889-9b08e9ab6521 req-0bc913ac-81ce-40ea-b435-abb4eb52a577 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Received unexpected event network-vif-plugged-47b9f821-e6b3-438b-b212-b690f6ea7700 for instance with vm_state building and task_state spawning. [ 1043.862054] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Successfully updated port: 47b9f821-e6b3-438b-b212-b690f6ea7700 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.872243] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "refresh_cache-498180c6-cc07-4c5a-bf1e-784148909d6a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.872391] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquired lock "refresh_cache-498180c6-cc07-4c5a-bf1e-784148909d6a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.872538] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1043.924442] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1044.037900] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.038160] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.038323] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.038478] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.038622] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1044.093942] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Updating instance_info_cache with network_info: [{"id": "47b9f821-e6b3-438b-b212-b690f6ea7700", "address": "fa:16:3e:7f:81:d1", "network": {"id": "6a6dab54-6313-4f89-a536-70a4c5579058", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-256948147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "69e402fe444647c590c23f71f049dbfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b9f821-e6", "ovs_interfaceid": "47b9f821-e6b3-438b-b212-b690f6ea7700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.106754] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Releasing lock "refresh_cache-498180c6-cc07-4c5a-bf1e-784148909d6a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1044.107062] env[61356]: DEBUG nova.compute.manager [None 
req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance network_info: |[{"id": "47b9f821-e6b3-438b-b212-b690f6ea7700", "address": "fa:16:3e:7f:81:d1", "network": {"id": "6a6dab54-6313-4f89-a536-70a4c5579058", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-256948147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "69e402fe444647c590c23f71f049dbfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b9f821-e6", "ovs_interfaceid": "47b9f821-e6b3-438b-b212-b690f6ea7700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1044.107501] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:81:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a10c88d7-d13f-44fd-acee-7a734eb5f56a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47b9f821-e6b3-438b-b212-b690f6ea7700', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.118507] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Creating folder: Project (69e402fe444647c590c23f71f049dbfb). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1044.118507] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc46ed60-f116-4604-862b-c938d38962e1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.128911] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Created folder: Project (69e402fe444647c590c23f71f049dbfb) in parent group-v846926. [ 1044.129278] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Creating folder: Instances. Parent ref: group-v846990. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1044.129627] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69e19f16-4eef-4dae-85d8-f2cfcb9ad48e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.141523] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Created folder: Instances in parent group-v846990. [ 1044.141892] env[61356]: DEBUG oslo.service.loopingcall [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.142235] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1044.142451] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da1570b5-365a-45a7-9682-cbbd3bc7d006 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.168157] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.168157] env[61356]: value = "task-4302563" [ 1044.168157] env[61356]: _type = "Task" [ 1044.168157] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.177438] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302563, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.683608] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302563, 'name': CreateVM_Task, 'duration_secs': 0.381712} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.683803] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1044.684937] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.685176] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.685549] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1044.685953] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4a4d152-32a2-4487-af3f-61a4d19856ad {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.690681] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Waiting for the task: (returnval){ [ 1044.690681] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5242056b-82aa-ce0f-fee1-b6b118d1c86e" [ 1044.690681] env[61356]: _type = "Task" [ 1044.690681] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.698585] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5242056b-82aa-ce0f-fee1-b6b118d1c86e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.760586] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.760784] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1044.760912] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1044.799481] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.799710] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.799848] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.799974] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800112] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800241] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800361] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800657] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800657] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800750] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1044.800822] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1045.202329] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.202629] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.202847] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.042837] env[61356]: DEBUG nova.compute.manager [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Received event network-changed-47b9f821-e6b3-438b-b212-b690f6ea7700 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1046.043046] env[61356]: DEBUG nova.compute.manager [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Refreshing instance network info cache due to event network-changed-47b9f821-e6b3-438b-b212-b690f6ea7700. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1046.043258] env[61356]: DEBUG oslo_concurrency.lockutils [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] Acquiring lock "refresh_cache-498180c6-cc07-4c5a-bf1e-784148909d6a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.043399] env[61356]: DEBUG oslo_concurrency.lockutils [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] Acquired lock "refresh_cache-498180c6-cc07-4c5a-bf1e-784148909d6a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.043561] env[61356]: DEBUG nova.network.neutron [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Refreshing network info cache for port 47b9f821-e6b3-438b-b212-b690f6ea7700 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.150710] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "7d397e6b-a49f-410a-afcc-23c4c462024b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.150710] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.341229] env[61356]: DEBUG nova.network.neutron [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Updated VIF entry in instance network info cache for port 47b9f821-e6b3-438b-b212-b690f6ea7700.
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.341229] env[61356]: DEBUG nova.network.neutron [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Updating instance_info_cache with network_info: [{"id": "47b9f821-e6b3-438b-b212-b690f6ea7700", "address": "fa:16:3e:7f:81:d1", "network": {"id": "6a6dab54-6313-4f89-a536-70a4c5579058", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-256948147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "69e402fe444647c590c23f71f049dbfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a10c88d7-d13f-44fd-acee-7a734eb5f56a", "external-id": "nsx-vlan-transportzone-766", "segmentation_id": 766, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47b9f821-e6", "ovs_interfaceid": "47b9f821-e6b3-438b-b212-b690f6ea7700", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.353759] env[61356]: DEBUG oslo_concurrency.lockutils [req-3620c19e-88ff-4d90-8130-aadaccd71a6d req-a6afbb32-6af0-4d1d-acb5-0ec6e09936a8 service nova] Releasing lock "refresh_cache-498180c6-cc07-4c5a-bf1e-784148909d6a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.399938] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "498180c6-cc07-4c5a-bf1e-784148909d6a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.039632] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6b649a2c-7c1f-4d12-b08b-1c48497e9a02 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "1fe3a0c0-6d61-450a-8754-ce82f6b24661" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.039924] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6b649a2c-7c1f-4d12-b08b-1c48497e9a02 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "1fe3a0c0-6d61-450a-8754-ce82f6b24661" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.326128] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16e1b0f1-2f41-40cc-a2a2-853d11f91c6d tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] Acquiring
lock "da968e6f-ef0a-41b5-b4a5-d11c096ad018" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.326452] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16e1b0f1-2f41-40cc-a2a2-853d11f91c6d tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] Lock "da968e6f-ef0a-41b5-b4a5-d11c096ad018" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.828816] env[61356]: WARNING oslo_vmware.rw_handles [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1086.828816] env[61356]: ERROR oslo_vmware.rw_handles [ 1086.829436] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1086.831481] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1086.831566] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Copying Virtual Disk [datastore2] vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] 
vmware_temp/d58e4f86-9d98-44fc-b33c-ece08e8c5361/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1086.831857] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c423386-01e9-478b-a7ed-77acb2173c76 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.839896] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for the task: (returnval){ [ 1086.839896] env[61356]: value = "task-4302564" [ 1086.839896] env[61356]: _type = "Task" [ 1086.839896] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.847333] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Task: {'id': task-4302564, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.350367] env[61356]: DEBUG oslo_vmware.exceptions [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1087.350714] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.351314] env[61356]: ERROR nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1087.351314] env[61356]: Faults: ['InvalidArgument'] [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Traceback (most recent call last): [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] yield resources [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self.driver.spawn(context, instance, image_meta, [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: 
cf1a71aa-f864-4fbe-be59-b47322f62a0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self._fetch_image_if_missing(context, vi) [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] image_cache(vi, tmp_image_ds_loc) [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] vm_util.copy_virtual_disk( [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] session._wait_for_task(vmdk_copy_task) [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] return self.wait_for_task(task_ref) [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] return evt.wait() [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] result = hub.switch() [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] return self.greenlet.switch() [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self.f(*self.args, **self.kw) [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] raise exceptions.translate_fault(task_info.error) [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Faults: ['InvalidArgument'] [ 1087.351314] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] [ 1087.352295] env[61356]: INFO nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Terminating instance [ 1087.353217] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.353442] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.353676] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9c2b44d-7ac1-4131-968b-08ee64fd7abd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.355807] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.355971] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquired lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.356160] env[61356]: DEBUG nova.network.neutron [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1087.362627] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.363145] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1087.364343] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-487caa10-cfb7-42f1-ba25-9f42d6726e0a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.371654] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Waiting for the task: (returnval){ [ 1087.371654] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52542f5c-42e0-0912-3935-ffde9aa96424" [ 1087.371654] env[61356]: _type = "Task" [ 1087.371654] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.378818] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52542f5c-42e0-0912-3935-ffde9aa96424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.386652] env[61356]: DEBUG nova.network.neutron [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1087.447431] env[61356]: DEBUG nova.network.neutron [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.456339] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Releasing lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.456583] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1087.456801] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1087.457921] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38675ff0-daf4-4c21-b933-d2c405ebf5ab {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.466176] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1087.466416] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e50eee7-b02c-4077-9638-d6dd9eccd8e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.492214] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1087.492449] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1087.492626] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Deleting the datastore file [datastore2] cf1a71aa-f864-4fbe-be59-b47322f62a0b {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.492870] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2691bbea-553b-452c-8f0b-3e16d6503d29 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.499165] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for the task: (returnval){ [ 1087.499165] env[61356]: value = "task-4302566" [ 1087.499165] env[61356]: _type = "Task" [ 1087.499165] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.506469] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Task: {'id': task-4302566, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.882196] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1087.882458] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Creating directory with path [datastore2] vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.882695] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6845bfa-b522-4f72-9504-5f0b7754f513 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.894315] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Created directory with path [datastore2] vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.894521] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Fetch image to [datastore2] vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1087.894690] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1087.895491] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810f63d8-69b3-497b-8609-7aace56d5a2a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.902035] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6203f49d-f518-4419-b9c6-f5c9412c57a1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.912023] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43457cf-6089-458f-a04d-8c6e7a6b6782 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.942268] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-48acbfd8-cfdf-4816-b2b6-7253902d9332 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.947949] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5a93b794-cd7d-4d5c-96f1-9c7a3dcc6e21 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.969057] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1088.009036] env[61356]: DEBUG oslo_vmware.api [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Task: {'id': task-4302566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033399} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.009036] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.009269] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1088.009320] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1088.009489] env[61356]: INFO nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Took 0.55 seconds to destroy the instance on the hypervisor. [ 1088.009763] env[61356]: DEBUG oslo.service.loopingcall [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.009977] env[61356]: DEBUG nova.compute.manager [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network deallocation for instance since networking was not requested.
{{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1088.012488] env[61356]: DEBUG nova.compute.claims [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1088.012704] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.013039] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.020969] env[61356]: DEBUG oslo_vmware.rw_handles [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1088.080374] env[61356]: DEBUG oslo_vmware.rw_handles [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1088.080562] env[61356]: DEBUG oslo_vmware.rw_handles [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1088.403490] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d10d5f-5ccd-4c8e-812a-44fb85c07103 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.411429] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c509eb-bc98-4942-a8f3-0300f22964b0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.442018] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9952e82-da71-4a4d-8e33-ae2e9bce4d6b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.448603] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540df49d-7f40-48bd-9d1a-b1109c62072b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.461174] env[61356]: DEBUG nova.compute.provider_tree [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1088.470288] env[61356]: DEBUG nova.scheduler.client.report [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1088.487583] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.475s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1088.488147] env[61356]: ERROR nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1088.488147] env[61356]: Faults: ['InvalidArgument']
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Traceback (most recent call last):
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self.driver.spawn(context, instance, image_meta,
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self._fetch_image_if_missing(context, vi)
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] image_cache(vi, tmp_image_ds_loc)
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] vm_util.copy_virtual_disk(
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] session._wait_for_task(vmdk_copy_task)
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] return self.wait_for_task(task_ref)
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] return evt.wait()
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] result = hub.switch()
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] return self.greenlet.switch()
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] self.f(*self.args, **self.kw)
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] raise exceptions.translate_fault(task_info.error)
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Faults: ['InvalidArgument']
[ 1088.488147] env[61356]: ERROR nova.compute.manager [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b]
[ 1088.488998] env[61356]: DEBUG nova.compute.utils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1088.490227] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Build of instance cf1a71aa-f864-4fbe-be59-b47322f62a0b was re-scheduled: A specified parameter was not correct: fileType
[ 1088.490227] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1088.490600] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1088.490815] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1088.490961] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquired lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1088.491137] env[61356]: DEBUG nova.network.neutron [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1088.514995] env[61356]: DEBUG nova.network.neutron [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1088.571228] env[61356]: DEBUG nova.network.neutron [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1088.579865] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Releasing lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1088.580093] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1088.580275] env[61356]: DEBUG nova.compute.manager [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Skipping network deallocation for instance since networking was not requested. {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}}
[ 1088.671080] env[61356]: INFO nova.scheduler.client.report [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Deleted allocations for instance cf1a71aa-f864-4fbe-be59-b47322f62a0b
[ 1088.693580] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7fc341b9-520e-4dae-a809-c1c6fd69d9ef tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 514.006s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1088.694706] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 312.250s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1088.694927] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1088.695141] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1088.695313] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1088.700661] env[61356]: INFO nova.compute.manager [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Terminating instance
[ 1088.702241] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquiring lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1088.702400] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Acquired lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1088.702568] env[61356]: DEBUG nova.network.neutron [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1088.716980] env[61356]: DEBUG nova.compute.manager [None req-aadbc6dd-760e-4501-b740-425c00540327 tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] [instance: 798abe6a-1a6b-4844-b61a-fe258e6f1f04] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1088.730134] env[61356]: DEBUG nova.network.neutron [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1088.740548] env[61356]: DEBUG nova.compute.manager [None req-aadbc6dd-760e-4501-b740-425c00540327 tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] [instance: 798abe6a-1a6b-4844-b61a-fe258e6f1f04] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1088.766607] env[61356]: DEBUG oslo_concurrency.lockutils [None req-aadbc6dd-760e-4501-b740-425c00540327 tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] Lock "798abe6a-1a6b-4844-b61a-fe258e6f1f04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.886s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1088.776614] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1088.791268] env[61356]: DEBUG nova.network.neutron [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1088.800753] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Releasing lock "refresh_cache-cf1a71aa-f864-4fbe-be59-b47322f62a0b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1088.801175] env[61356]: DEBUG nova.compute.manager [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1088.801370] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1088.802217] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11d53d46-e758-47c3-9fb1-04433ce57a2d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.811455] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755902c0-58fc-4635-9002-b683a2dad855 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1088.844744] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf1a71aa-f864-4fbe-be59-b47322f62a0b could not be found.
[ 1088.844970] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1088.847180] env[61356]: INFO nova.compute.manager [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1088.847433] env[61356]: DEBUG oslo.service.loopingcall [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1088.848489] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1088.848721] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1088.850153] env[61356]: INFO nova.compute.claims [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1088.852595] env[61356]: DEBUG nova.compute.manager [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1088.852595] env[61356]: DEBUG nova.network.neutron [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1088.872591] env[61356]: DEBUG nova.network.neutron [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1088.880415] env[61356]: DEBUG nova.network.neutron [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1088.894130] env[61356]: INFO nova.compute.manager [-] [instance: cf1a71aa-f864-4fbe-be59-b47322f62a0b] Took 0.04 seconds to deallocate network for instance.
[ 1089.006660] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c4a8e921-11fe-414f-be86-3006af8f7e47 tempest-ServersAdmin275Test-1499869652 tempest-ServersAdmin275Test-1499869652-project-member] Lock "cf1a71aa-f864-4fbe-be59-b47322f62a0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.312s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1089.225381] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736e184d-6bb6-41ae-b398-efa7b39082d5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.233076] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146e4879-6897-4e60-bc89-6acb6e59d780 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.263131] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e3b092-c4a3-4ec5-af97-9104454bb303 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.269686] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6842042-4d47-41aa-ba0e-306c556690bd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.283571] env[61356]: DEBUG nova.compute.provider_tree [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1089.292252] env[61356]: DEBUG nova.scheduler.client.report [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1089.304812] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.456s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1089.305322] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1089.335834] env[61356]: DEBUG nova.compute.utils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1089.337483] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1089.339632] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1089.345534] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1089.414174] env[61356]: DEBUG nova.policy [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94d6deb96c6f44c9a1cab0e74f3f2f72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08984d81a5404c1f9b3f7955a2dc4721', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1089.419029] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1089.443960] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1089.444232] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1089.444458] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1089.444833] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1089.444833] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1089.444937] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1089.445160] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1089.445323] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1089.445491] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1089.445650] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1089.445820] env[61356]: DEBUG nova.virt.hardware [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1089.446675] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1622d0dd-d5e9-4422-a2b5-25bbbdf5d7dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.455236] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4c9378-e966-429b-a8e9-0fd159f1c61e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1089.729336] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Successfully created port: 1e12d806-fcf1-4ed2-b27a-8afb551de8cd {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1090.253185] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Successfully created port: f67b228f-3519-4534-a9ff-b193f51ef7eb {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1090.761178] env[61356]: DEBUG nova.compute.manager [req-b6b16bd6-e1ed-4778-8a63-55a11416f978 req-25bbbea1-0031-421a-8117-c6ecbf9aae75 service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Received event network-vif-plugged-1e12d806-fcf1-4ed2-b27a-8afb551de8cd {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1090.761387] env[61356]: DEBUG oslo_concurrency.lockutils [req-b6b16bd6-e1ed-4778-8a63-55a11416f978 req-25bbbea1-0031-421a-8117-c6ecbf9aae75 service nova] Acquiring lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1090.761587] env[61356]: DEBUG oslo_concurrency.lockutils [req-b6b16bd6-e1ed-4778-8a63-55a11416f978 req-25bbbea1-0031-421a-8117-c6ecbf9aae75 service nova] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1090.761749] env[61356]: DEBUG oslo_concurrency.lockutils [req-b6b16bd6-e1ed-4778-8a63-55a11416f978 req-25bbbea1-0031-421a-8117-c6ecbf9aae75 service nova] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1090.761907] env[61356]: DEBUG nova.compute.manager [req-b6b16bd6-e1ed-4778-8a63-55a11416f978 req-25bbbea1-0031-421a-8117-c6ecbf9aae75 service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] No waiting events found dispatching network-vif-plugged-1e12d806-fcf1-4ed2-b27a-8afb551de8cd {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1090.762101] env[61356]: WARNING nova.compute.manager [req-b6b16bd6-e1ed-4778-8a63-55a11416f978 req-25bbbea1-0031-421a-8117-c6ecbf9aae75 service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Received unexpected event network-vif-plugged-1e12d806-fcf1-4ed2-b27a-8afb551de8cd for instance with vm_state building and task_state spawning.
[ 1090.846062] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Successfully updated port: 1e12d806-fcf1-4ed2-b27a-8afb551de8cd {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1091.495102] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Successfully updated port: f67b228f-3519-4534-a9ff-b193f51ef7eb {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1091.502850] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1091.502994] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1091.503155] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1091.550679] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1091.914494] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Updating instance_info_cache with network_info: [{"id": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "address": "fa:16:3e:5c:24:3d", "network": {"id": "5b6e6fb3-c24b-4bef-aa3a-4c512c4d38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1650201600", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e12d806-fc", "ovs_interfaceid": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "address": "fa:16:3e:9d:5c:d4", "network": {"id": "1f0ad4b0-8650-475d-b0fa-25f299f302bf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666271764", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf67b228f-35", "ovs_interfaceid": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1091.932686] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Releasing lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1091.933163] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance network_info: |[{"id": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "address": "fa:16:3e:5c:24:3d", "network": {"id": "5b6e6fb3-c24b-4bef-aa3a-4c512c4d38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1650201600", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e12d806-fc", "ovs_interfaceid": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "address": "fa:16:3e:9d:5c:d4", "network": {"id": "1f0ad4b0-8650-475d-b0fa-25f299f302bf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666271764", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf67b228f-35", "ovs_interfaceid": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1091.934184] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:24:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e12d806-fcf1-4ed2-b27a-8afb551de8cd', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:5c:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f67b228f-3519-4534-a9ff-b193f51ef7eb', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1091.948237] env[61356]: DEBUG oslo.service.loopingcall [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1091.948598] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1091.949236] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-783167d1-ebf3-4eca-a774-4d3fff5f5f96 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1091.971258] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1091.971258] env[61356]: value = "task-4302567"
[ 1091.971258] env[61356]: _type = "Task"
[ 1091.971258] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1091.980881] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302567, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1092.482777] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302567, 'name': CreateVM_Task, 'duration_secs': 0.320281} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1092.482954] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1092.483708] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1092.483872] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1092.484229] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1092.484472] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04eeac36-7d7c-4830-91b1-8cc6067f8876 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1092.488692] env[61356]: DEBUG oslo_vmware.api [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){
[ 1092.488692] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52752c14-029e-a186-8bb1-49969fe80557"
[ 1092.488692] env[61356]: _type = "Task"
[ 1092.488692] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1093.165520] env[61356]: DEBUG nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Received event network-changed-1e12d806-fcf1-4ed2-b27a-8afb551de8cd {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1093.165876] env[61356]: DEBUG nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Refreshing instance network info cache due to event network-changed-1e12d806-fcf1-4ed2-b27a-8afb551de8cd. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 1093.165921] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Acquiring lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1093.166047] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Acquired lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1093.166209] env[61356]: DEBUG nova.network.neutron [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Refreshing network info cache for port 1e12d806-fcf1-4ed2-b27a-8afb551de8cd {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1093.175885] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1093.176249] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1093.176553] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1093.468786] env[61356]: DEBUG nova.network.neutron [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Updated VIF entry in instance network info cache for port 1e12d806-fcf1-4ed2-b27a-8afb551de8cd. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1093.469207] env[61356]: DEBUG nova.network.neutron [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Updating instance_info_cache with network_info: [{"id": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "address": "fa:16:3e:5c:24:3d", "network": {"id": "5b6e6fb3-c24b-4bef-aa3a-4c512c4d38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1650201600", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e12d806-fc", "ovs_interfaceid": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "address": "fa:16:3e:9d:5c:d4", "network": {"id": "1f0ad4b0-8650-475d-b0fa-25f299f302bf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666271764", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf67b228f-35", "ovs_interfaceid": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1093.478761] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Releasing lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1093.478993] env[61356]: DEBUG nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Received event network-vif-plugged-f67b228f-3519-4534-a9ff-b193f51ef7eb {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1093.479201] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Acquiring lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1093.479406] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1093.479567] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1093.479725] env[61356]: DEBUG nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] No waiting events found dispatching network-vif-plugged-f67b228f-3519-4534-a9ff-b193f51ef7eb {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1093.479887] env[61356]: WARNING nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Received unexpected event network-vif-plugged-f67b228f-3519-4534-a9ff-b193f51ef7eb for instance with vm_state building and task_state spawning.
[ 1093.480058] env[61356]: DEBUG nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Received event network-changed-f67b228f-3519-4534-a9ff-b193f51ef7eb {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1093.480212] env[61356]: DEBUG nova.compute.manager [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Refreshing instance network info cache due to event network-changed-f67b228f-3519-4534-a9ff-b193f51ef7eb. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 1093.480388] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Acquiring lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1093.480531] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Acquired lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1093.480687] env[61356]: DEBUG nova.network.neutron [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Refreshing network info cache for port f67b228f-3519-4534-a9ff-b193f51ef7eb {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1093.733271] env[61356]: DEBUG nova.network.neutron [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Updated VIF entry in instance network info cache for port f67b228f-3519-4534-a9ff-b193f51ef7eb. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1093.733682] env[61356]: DEBUG nova.network.neutron [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Updating instance_info_cache with network_info: [{"id": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "address": "fa:16:3e:5c:24:3d", "network": {"id": "5b6e6fb3-c24b-4bef-aa3a-4c512c4d38e6", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1650201600", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e12d806-fc", "ovs_interfaceid": "1e12d806-fcf1-4ed2-b27a-8afb551de8cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "address": "fa:16:3e:9d:5c:d4", "network": {"id": "1f0ad4b0-8650-475d-b0fa-25f299f302bf", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1666271764", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "08984d81a5404c1f9b3f7955a2dc4721", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf67b228f-35", "ovs_interfaceid": "f67b228f-3519-4534-a9ff-b193f51ef7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1093.743367] env[61356]: DEBUG oslo_concurrency.lockutils [req-ac8f2de5-b53c-4c5e-a064-680ea6ba7c5a req-9c745d38-5de8-4aa4-974e-9e936424ddcc service nova] Releasing lock "refresh_cache-85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1094.758771] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1097.766797] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1097.767157] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1097.767411] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}}
[ 1097.784240] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] There are 1 instances to clean {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}}
[ 1097.784506] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 04b94227-4b90-4472-a367-39b8c5242558] Instance has had 0 of 5 cleanup attempts {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11264}}
[ 1100.813423] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1100.825036] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1100.825279] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1100.825449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1100.825625] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1100.826742] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1439b1cd-eeb2-44e3-8a87-dbc34ccfa2a6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1100.835455] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b045c6-4863-43cc-b8ef-506393cbd61a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1100.848687] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c87c842-b629-4231-90a3-38ce232f778f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1100.854784] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023d0f34-f82c-4e13-9af2-103902ab895c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1100.885908] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180687MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1100.886076] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1100.886273] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1100.888023] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1100.965804] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.965968] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966116] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966241] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966358] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966475] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966588] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966698] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1100.966808] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.966920] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.978406] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1100.995117] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2e4a6bd2-64d8-4c48-81c6-b3c749e4c270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.005960] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2186aafa-5fd5-4b61-84f1-a694e20724dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.015622] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 90476778-373e-430f-ad56-b9e7e4d0540b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.025839] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c4aa0d72-8ad1-4a1f-9a5f-279baca944cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.036478] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f0a73d0-c97d-4fa6-b00f-50f14763ad9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.046059] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.054765] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ce5f2517-fbf8-4ecc-b6f5-24c56238786b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.063967] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 508170e0-afe4-4ff5-94cc-d7b093cfd7d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.072723] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bd723957-6a15-4d23-88a4-c708778bee39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.081689] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c63173e3-52d8-4412-a225-37b67187126a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.090474] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 03f22cd3-fea8-43be-a4db-f65521c832a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.100010] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.108488] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.118837] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1fe3a0c0-6d61-450a-8754-ce82f6b24661 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.127911] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance da968e6f-ef0a-41b5-b4a5-d11c096ad018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.128145] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1101.128290] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1101.417870] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a0d170-b479-4f3e-b84a-9f58a4385a7d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.425436] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6283032f-3171-467d-89b8-d6de3ef0ebc7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.454808] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427245e6-7546-4ea6-b3e5-1cc57b8f23e6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.461899] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c87a995-c88a-4194-aa10-1b02869aeca8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.474767] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: 
cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.482944] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.497865] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1101.498070] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.612s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.443259] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.443597] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.758134] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.758411] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.754077] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.753113] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.777367] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.777661] 
env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1105.758809] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1105.758988] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1105.759128] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1105.779096] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779407] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779407] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779479] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779623] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779748] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779865] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.779979] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.780106] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.780226] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1105.780373] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1106.758599] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.758758] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances with incomplete migration {{(pid=61356) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1118.998366] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.018838] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Getting list of instances from cluster (obj){ [ 1119.018838] env[61356]: value = "domain-c8" [ 1119.018838] env[61356]: _type = "ClusterComputeResource" [ 1119.018838] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1119.020171] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11051b6e-bc41-4fae-9640-7a733878b7bc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.036919] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Got total of 10 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1119.037120] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 58e76dfc-e2f3-41ec-896a-faadb552c1ac {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038033] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid d1887728-4538-4639-a3e7-db235a2d4765 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038033] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038033] env[61356]: DEBUG nova.compute.manager [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 1df51a6d-1303-4c89-b61e-95070acf0772 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038033] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 2d2dca63-8ab0-4420-90b0-38a330df8271 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038033] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 22e72346-49c6-4044-ae29-96679d4f2253 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038447] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038447] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 1353d492-e9f0-4495-83e1-6832bb03d5db {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038447] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 498180c6-cc07-4c5a-bf1e-784148909d6a {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038605] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1119.038934] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.039187] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "d1887728-4538-4639-a3e7-db235a2d4765" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.039387] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.039580] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "1df51a6d-1303-4c89-b61e-95070acf0772" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.039770] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "2d2dca63-8ab0-4420-90b0-38a330df8271" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.039959] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "22e72346-49c6-4044-ae29-96679d4f2253" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.040167] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.040358] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "1353d492-e9f0-4495-83e1-6832bb03d5db" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.040545] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "498180c6-cc07-4c5a-bf1e-784148909d6a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.040734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.564117] env[61356]: WARNING oslo_vmware.rw_handles [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1135.564117] env[61356]: ERROR oslo_vmware.rw_handles [ 1135.564769] env[61356]: DEBUG 
nova.virt.vmwareapi.images [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1135.566714] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1135.566969] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Copying Virtual Disk [datastore2] vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/87a39336-9a0c-4515-9264-8ac6acf42ad1/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1135.567260] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0883c65c-118d-4501-9751-ec90031bbc04 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.575366] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Waiting for the task: (returnval){ [ 1135.575366] env[61356]: value = "task-4302568" [ 1135.575366] env[61356]: _type = "Task" [ 1135.575366] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.583343] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Task: {'id': task-4302568, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.086092] env[61356]: DEBUG oslo_vmware.exceptions [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1136.086360] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.086904] env[61356]: ERROR nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.086904] env[61356]: Faults: ['InvalidArgument'] [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Traceback (most recent call last): [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] yield resources [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self.driver.spawn(context, instance, image_meta, [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self._fetch_image_if_missing(context, vi) [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] image_cache(vi, tmp_image_ds_loc) [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] vm_util.copy_virtual_disk( [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] session._wait_for_task(vmdk_copy_task) [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] return self.wait_for_task(task_ref) [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] return evt.wait() [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] result = hub.switch() [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] return self.greenlet.switch() [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self.f(*self.args, **self.kw) [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] raise exceptions.translate_fault(task_info.error) [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Faults: ['InvalidArgument'] [ 1136.086904] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] [ 1136.087974] env[61356]: INFO nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Terminating instance [ 1136.088832] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.089062] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.089304] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-540f2549-4874-4af9-8665-d59c7e33e408 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.091411] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1136.091606] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1136.092329] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8527928e-007e-49ff-8b9f-64fdaeadafd0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.098924] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1136.099188] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35b567b2-7aec-42e3-a477-010128358d3f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.101240] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.101413] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1136.102336] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-747268ee-049d-4dbf-9fee-4e13895a35c5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.106728] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Waiting for the task: (returnval){ [ 1136.106728] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52807205-72a3-f9e8-6e41-e01623564a85" [ 1136.106728] env[61356]: _type = "Task" [ 1136.106728] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.114766] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52807205-72a3-f9e8-6e41-e01623564a85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.165095] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1136.165351] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1136.165531] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Deleting the datastore file [datastore2] 58e76dfc-e2f3-41ec-896a-faadb552c1ac {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.165814] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8a726d2-932f-489c-bad7-72255eb3333e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.171826] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Waiting for the task: (returnval){ [ 1136.171826] env[61356]: value = "task-4302570" [ 1136.171826] env[61356]: _type = "Task" [ 1136.171826] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.180047] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Task: {'id': task-4302570, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.617022] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1136.617022] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Creating directory with path [datastore2] vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.618028] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3e722cb-29fc-496c-b76f-294db48b4989 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.638370] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Created directory with path [datastore2] vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.638575] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Fetch image to [datastore2] vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1136.638691] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1136.639427] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adceeb9-866d-4d4e-ad44-fbdd221b6648 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.645664] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a461b8b-40eb-4793-a22d-21fd6f76a10f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.654570] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8103422d-a1b2-499a-9a5c-7fcdf53d15f0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.688062] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99293735-688c-4577-8676-db7ab3414477 {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.696668] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d4198880-3063-48a6-aade-323c115c31fb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.698506] env[61356]: DEBUG oslo_vmware.api [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Task: {'id': task-4302570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072792} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.698820] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.699143] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1136.699411] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1136.699636] env[61356]: INFO nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Took 0.61 seconds to destroy the instance on the hypervisor. 
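The copy/delete sequence above (task-4302568, task-4302570) follows oslo.vmware's invoke-then-poll pattern: a *_Task method returns a task reference immediately, and wait_for_task() polls it (the "progress is 0%" entries), translating a failed task into a VimFaultException such as the InvalidArgument fault logged earlier. Below is a minimal sketch of that pattern, assuming only oslo.vmware's public VMwareAPISession API; the vCenter host, credentials, datastore paths, and datacenter reference are placeholders, not values taken from this log.

    from oslo_vmware import api
    from oslo_vmware import exceptions

    # Placeholder endpoint and credentials -- not values from this log.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'admin', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    vim = session.vim
    disk_mgr = vim.service_content.virtualDiskManager
    dc_ref = None  # placeholder Datacenter moref; the real lookup is elided

    try:
        # A *_Task invocation returns a task moref right away.
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
            sourceDatacenter=dc_ref,
            destName='[datastore2] vmware_temp/example/example.vmdk')
        # wait_for_task() polls the task until it completes and raises a
        # translated fault if the task errors out, as in the traceback above.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        # For the failure logged above, e.fault_list is ['InvalidArgument'].
        print(e.fault_list, e.msg)
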
[ 1136.701740] env[61356]: DEBUG nova.compute.claims [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1136.701953] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.702191] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.724667] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1136.790254] env[61356]: DEBUG oslo_vmware.rw_handles [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1136.851026] env[61356]: DEBUG oslo_vmware.rw_handles [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1136.851133] env[61356]: DEBUG oslo_vmware.rw_handles [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1137.115993] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913abe41-e2b8-4965-956a-673dc6400b56 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.124350] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21e764a-c893-472a-b02e-f45f6e4013fe {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.153168] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f657455-f164-44c6-b9e6-29498a556d33 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.160009] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3397567-39e9-49b0-a619-35e61857a7fe {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.172741] env[61356]: DEBUG nova.compute.provider_tree [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.182622] env[61356]: DEBUG nova.scheduler.client.report [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1137.197798] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.495s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.198325] env[61356]: ERROR nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1137.198325] env[61356]: Faults: ['InvalidArgument'] [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Traceback (most recent call last): [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self.driver.spawn(context, instance, image_meta, [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self._fetch_image_if_missing(context, vi) [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] image_cache(vi, tmp_image_ds_loc) [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] vm_util.copy_virtual_disk( [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] session._wait_for_task(vmdk_copy_task) [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] return self.wait_for_task(task_ref) [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] return evt.wait() [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] result = hub.switch() [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] return self.greenlet.switch() [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] self.f(*self.args, **self.kw) [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 
58e76dfc-e2f3-41ec-896a-faadb552c1ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] raise exceptions.translate_fault(task_info.error) [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Faults: ['InvalidArgument'] [ 1137.198325] env[61356]: ERROR nova.compute.manager [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] [ 1137.199245] env[61356]: DEBUG nova.compute.utils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1137.200512] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Build of instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac was re-scheduled: A specified parameter was not correct: fileType [ 1137.200512] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1137.200881] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1137.201067] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1137.201238] env[61356]: DEBUG nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1137.201400] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1137.536929] env[61356]: DEBUG nova.network.neutron [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.550991] env[61356]: INFO nova.compute.manager [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Took 0.35 seconds to deallocate network for instance. [ 1137.650535] env[61356]: INFO nova.scheduler.client.report [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Deleted allocations for instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac [ 1137.674273] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0d7bae3d-5b12-4169-b1bb-5b3c38399640 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 555.082s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.674659] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 354.009s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.675449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Acquiring lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.675449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.675449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.677368] env[61356]: INFO nova.compute.manager [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Terminating instance [ 1137.680787] env[61356]: DEBUG nova.compute.manager [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1137.680985] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1137.681268] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46aaca83-a697-4120-8d75-de059be1c265 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.687692] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1137.695209] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64291301-fc32-49ae-ad7d-cee502b8d152 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.725986] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 58e76dfc-e2f3-41ec-896a-faadb552c1ac could not be found. 
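At this point the instance was already cleaned up by the failed-build path, so the terminate request's lookup on the backend raises InstanceNotFound, and vmops merely logs the warning above before reporting the instance destroyed. A sketch of that idempotent-destroy convention, with `find_vm` and `power_off_and_delete` as assumed stand-ins for the driver's real helpers (the log shows the actual lookup going through SearchIndex.FindAllByUuid):

```python
class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(find_vm, power_off_and_delete, instance_uuid, log):
    """Destroy an instance, treating a missing backend VM as success.

    `find_vm` and `power_off_and_delete` are hypothetical callables; the
    point is the control flow, not the vSphere plumbing.
    """
    try:
        vm_ref = find_vm(instance_uuid)
    except InstanceNotFound:
        # Matches the WARNING above: there is nothing left to tear down,
        # but the terminate request still completes, so repeated or racing
        # deletes stay idempotent.
        log('Instance does not exist on backend: %s' % instance_uuid)
        return
    power_off_and_delete(vm_ref)
```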
[ 1137.728570] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1137.728570] env[61356]: INFO nova.compute.manager [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1137.728570] env[61356]: DEBUG oslo.service.loopingcall [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1137.728570] env[61356]: DEBUG nova.compute.manager [-] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1137.728570] env[61356]: DEBUG nova.network.neutron [-] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1137.756724] env[61356]: DEBUG nova.network.neutron [-] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.763721] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.763966] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.765425] env[61356]: INFO nova.compute.claims [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.768615] env[61356]: INFO nova.compute.manager [-] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] Took 0.04 seconds to deallocate network for instance. 
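The instance_claim above serializes on the same "compute_resources" lock that the earlier claim abort held; the Acquiring/acquired/released lines with waited/held timings come from oslo.concurrency's lock wrapper. A minimal sketch of the pattern using oslo.concurrency's real `synchronized` decorator (the function body is illustrative, not the resource tracker's actual code):

```python
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(tracker, instance):
    # Everything in here runs under an in-process lock keyed by the name
    # 'compute_resources', so concurrent claims, claim aborts, and the
    # update_available_resource periodic task cannot interleave. The
    # "waited"/"held" timings in the log are measured around exactly this
    # acquire/release pair.
    tracker.claim(instance)
```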
[ 1137.861447] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9d509295-b3e8-4268-9601-64eb1e8ac624 tempest-VolumesAssistedSnapshotsTest-583933516 tempest-VolumesAssistedSnapshotsTest-583933516-project-member] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.862293] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.823s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.862654] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 58e76dfc-e2f3-41ec-896a-faadb552c1ac] During sync_power_state the instance has a pending task (deleting). Skip. [ 1137.862654] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "58e76dfc-e2f3-41ec-896a-faadb552c1ac" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.160395] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90446e03-7b46-4cab-ad5d-7350576bd294 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.168144] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71600c3c-68ec-4ad6-8971-b2e5000576af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.197912] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1426e9f4-04d0-46c5-8c63-a693ea84c8de {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.205210] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278ee342-f8ba-4dff-bf55-32fe6bf8e82b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.217964] env[61356]: DEBUG nova.compute.provider_tree [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.228024] env[61356]: DEBUG nova.scheduler.client.report [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1138.243090] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.479s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.243567] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1138.278395] env[61356]: DEBUG nova.compute.utils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1138.280144] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1138.280315] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1138.289500] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1138.359098] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1138.362304] env[61356]: DEBUG nova.policy [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c6417300f4cd4084b98b6431ae3b011e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3a04a0b59af4823807aeba845ade95a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1138.386778] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1138.387015] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1138.387244] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.387467] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1138.387678] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.387848] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1138.388066] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1138.388233] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1138.388400] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1138.388564] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1138.388761] env[61356]: DEBUG nova.virt.hardware [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1138.389622] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb83509b-93ae-4dad-94fb-dcad5657c7d2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.397625] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2d27ef-8e98-4143-bc26-6561effa4aec {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.721632] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Successfully created port: 06c8eb7a-494c-4e68-b0cb-7f8b120057d3 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.352176] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Successfully updated port: 06c8eb7a-494c-4e68-b0cb-7f8b120057d3 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1139.377870] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "refresh_cache-d30d291d-6cfc-492e-bbbe-1ff021d4ee75" {{(pid=61356) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.378034] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquired lock "refresh_cache-d30d291d-6cfc-492e-bbbe-1ff021d4ee75" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.378211] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1139.588380] env[61356]: DEBUG nova.compute.manager [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Received event network-vif-plugged-06c8eb7a-494c-4e68-b0cb-7f8b120057d3 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1139.588598] env[61356]: DEBUG oslo_concurrency.lockutils [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] Acquiring lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.588825] env[61356]: DEBUG oslo_concurrency.lockutils [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.589009] env[61356]: DEBUG oslo_concurrency.lockutils [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.589180] env[61356]: DEBUG nova.compute.manager [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] No waiting events found dispatching network-vif-plugged-06c8eb7a-494c-4e68-b0cb-7f8b120057d3 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1139.589394] env[61356]: WARNING nova.compute.manager [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Received unexpected event network-vif-plugged-06c8eb7a-494c-4e68-b0cb-7f8b120057d3 for instance with vm_state building and task_state spawning. 
[ 1139.589501] env[61356]: DEBUG nova.compute.manager [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Received event network-changed-06c8eb7a-494c-4e68-b0cb-7f8b120057d3 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1139.589653] env[61356]: DEBUG nova.compute.manager [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Refreshing instance network info cache due to event network-changed-06c8eb7a-494c-4e68-b0cb-7f8b120057d3. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1139.589815] env[61356]: DEBUG oslo_concurrency.lockutils [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] Acquiring lock "refresh_cache-d30d291d-6cfc-492e-bbbe-1ff021d4ee75" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1139.590747] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1139.767421] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Updating instance_info_cache with network_info: [{"id": "06c8eb7a-494c-4e68-b0cb-7f8b120057d3", "address": "fa:16:3e:12:5c:38", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06c8eb7a-49", "ovs_interfaceid": "06c8eb7a-494c-4e68-b0cb-7f8b120057d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.783816] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Releasing lock "refresh_cache-d30d291d-6cfc-492e-bbbe-1ff021d4ee75" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.784116] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 
tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance network_info: |[{"id": "06c8eb7a-494c-4e68-b0cb-7f8b120057d3", "address": "fa:16:3e:12:5c:38", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06c8eb7a-49", "ovs_interfaceid": "06c8eb7a-494c-4e68-b0cb-7f8b120057d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1139.784415] env[61356]: DEBUG oslo_concurrency.lockutils [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] Acquired lock "refresh_cache-d30d291d-6cfc-492e-bbbe-1ff021d4ee75" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.784594] env[61356]: DEBUG nova.network.neutron [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Refreshing network info cache for port 06c8eb7a-494c-4e68-b0cb-7f8b120057d3 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1139.785679] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:5c:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06c8eb7a-494c-4e68-b0cb-7f8b120057d3', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1139.793522] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Creating folder: Project (c3a04a0b59af4823807aeba845ade95a). Parent ref: group-v846926. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1139.796428] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8168cf5e-c22f-4ae3-86ce-de9d2deb232b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.809539] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Created folder: Project (c3a04a0b59af4823807aeba845ade95a) in parent group-v846926. [ 1139.809731] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Creating folder: Instances. Parent ref: group-v846994. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1139.809957] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bdd5109-cb55-4ff8-8a99-4685c64f0eee {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.818504] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Created folder: Instances in parent group-v846994. [ 1139.818762] env[61356]: DEBUG oslo.service.loopingcall [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1139.818949] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1139.819154] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d53ae66f-66cc-4e61-840e-ab021493458a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.840282] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1139.840282] env[61356]: value = "task-4302573" [ 1139.840282] env[61356]: _type = "Task" [ 1139.840282] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.850860] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302573, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.076929] env[61356]: DEBUG nova.network.neutron [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Updated VIF entry in instance network info cache for port 06c8eb7a-494c-4e68-b0cb-7f8b120057d3. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1140.077335] env[61356]: DEBUG nova.network.neutron [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Updating instance_info_cache with network_info: [{"id": "06c8eb7a-494c-4e68-b0cb-7f8b120057d3", "address": "fa:16:3e:12:5c:38", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06c8eb7a-49", "ovs_interfaceid": "06c8eb7a-494c-4e68-b0cb-7f8b120057d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.087432] env[61356]: DEBUG oslo_concurrency.lockutils [req-97dbe924-da52-4dc1-9935-834552412172 req-9cc1f4dc-c322-4255-bde4-247c1dc15fdb service nova] Releasing lock "refresh_cache-d30d291d-6cfc-492e-bbbe-1ff021d4ee75" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.352270] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302573, 'name': CreateVM_Task, 'duration_secs': 0.27686} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.353027] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1140.353350] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.353514] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.353852] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1140.354122] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90ce6261-362f-45fa-9d54-484fe15c90b2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.358357] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Waiting for the task: (returnval){ [ 1140.358357] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5279e347-2e42-8af9-d03a-41f6070af2b2" [ 1140.358357] env[61356]: _type = "Task" [ 1140.358357] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.365721] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5279e347-2e42-8af9-d03a-41f6070af2b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.869806] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.870754] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.870754] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.211961] env[61356]: DEBUG oslo_concurrency.lockutils [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.631185] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.631535] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.801268] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.758718] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.770668] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.770902] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.771096] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.771260] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1160.772524] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afae6b11-51be-4324-bcb3-fdc75121fb67 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.781450] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff9c230-531c-405d-9a21-bf22e4a8157b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.796525] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb92d8a-a657-4ffa-b0f9-1c2e9306b51c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.802834] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85634827-553c-48d9-b41c-30c1a0f3879e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.831178] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180667MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1160.831341] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.831517] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1160.988669] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 
d1887728-4538-4639-a3e7-db235a2d4765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.988893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989036] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989167] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989287] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989406] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989520] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989632] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989745] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1160.989857] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.005927] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2e4a6bd2-64d8-4c48-81c6-b3c749e4c270 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.018796] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2186aafa-5fd5-4b61-84f1-a694e20724dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.029185] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 90476778-373e-430f-ad56-b9e7e4d0540b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.038960] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c4aa0d72-8ad1-4a1f-9a5f-279baca944cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.049063] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9f0a73d0-c97d-4fa6-b00f-50f14763ad9b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.059103] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.068934] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ce5f2517-fbf8-4ecc-b6f5-24c56238786b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.078572] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 508170e0-afe4-4ff5-94cc-d7b093cfd7d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.088486] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bd723957-6a15-4d23-88a4-c708778bee39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.097757] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c63173e3-52d8-4412-a225-37b67187126a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.107502] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 03f22cd3-fea8-43be-a4db-f65521c832a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.117471] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.126218] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.134985] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1fe3a0c0-6d61-450a-8754-ce82f6b24661 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.144124] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance da968e6f-ef0a-41b5-b4a5-d11c096ad018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.152545] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1161.152758] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1161.152904] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1161.169876] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing inventories for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1161.185077] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Updating ProviderTree inventory for provider cfb833eb-84f8-480d-866a-1ad2fb871484 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1161.185256] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1161.195395] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing aggregate associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, aggregates: None {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1161.212918] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing trait associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1161.497381] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cdac8b-e16c-458b-8a1c-b9c7d6a84822 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.504910] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2696e4d5-1046-4a42-ad94-4c1cbe3b1ee9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.533693] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c6a9f1-d6f0-4fed-8049-6c5bb0695651 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.540848] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1048b435-9c94-4816-8cc1-0a5ee73fe87a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.553289] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.563308] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1161.582026] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1161.582232] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.751s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.747997] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.748286] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.581960] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.582235] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.753591] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.757209] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.757385] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.759036] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.759036] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1167.759683] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.760067] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1167.760067] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1167.782368] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.782557] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.782781] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.782964] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783176] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783350] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783518] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783647] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783765] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783881] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1167.783997] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1173.171020] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a0fba7f-fb95-4f73-aedc-84e96903ee05 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Acquiring lock "bca58b74-2a67-4943-b6f3-52dec9227ceb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.171347] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a0fba7f-fb95-4f73-aedc-84e96903ee05 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Lock "bca58b74-2a67-4943-b6f3-52dec9227ceb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.602605] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2863887e-2541-4286-9805-95cb0e90e05e tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Acquiring lock "bec41d8f-c7c9-4c23-af43-0ef4816f631e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.602894] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2863887e-2541-4286-9805-95cb0e90e05e tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Lock "bec41d8f-c7c9-4c23-af43-0ef4816f631e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.222620] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce4fbea7-96ba-4519-a83c-b387e03efa28 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Acquiring lock "d512abed-9776-449c-9de0-00925d888964" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.222936] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce4fbea7-96ba-4519-a83c-b387e03efa28 tempest-ListServerFiltersTestJSON-478204490 
tempest-ListServerFiltersTestJSON-478204490-project-member] Lock "d512abed-9776-449c-9de0-00925d888964" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.441809] env[61356]: WARNING oslo_vmware.rw_handles [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1183.441809] env[61356]: ERROR oslo_vmware.rw_handles [ 1183.442451] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1183.444451] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1183.446087] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Copying Virtual Disk [datastore2] vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/940e4c80-84e2-4883-a8d4-d4652de7960e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1183.446087] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e416c187-2e00-412f-a872-53e044da2e44 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.452927] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Waiting for the task: (returnval){ [ 1183.452927] env[61356]: value = "task-4302574" [ 1183.452927] env[61356]: _type = "Task" [ 1183.452927] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.461745] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Task: {'id': task-4302574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.836888] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2f54bebd-912e-4168-827a-f20cf4fd8a12 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "1c4d0b51-253c-4f0f-8c83-b341900afd28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.837177] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2f54bebd-912e-4168-827a-f20cf4fd8a12 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "1c4d0b51-253c-4f0f-8c83-b341900afd28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.964619] env[61356]: DEBUG oslo_vmware.exceptions [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1183.964935] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.965557] env[61356]: ERROR nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1183.965557] env[61356]: Faults: ['InvalidArgument'] [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] Traceback (most recent call last): [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] yield resources [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self.driver.spawn(context, instance, image_meta, [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self._fetch_image_if_missing(context, vi) [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] image_cache(vi, tmp_image_ds_loc) [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] vm_util.copy_virtual_disk( [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] session._wait_for_task(vmdk_copy_task) [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] return self.wait_for_task(task_ref) [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] return evt.wait() [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] result = hub.switch() [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] return self.greenlet.switch() [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self.f(*self.args, **self.kw) [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] raise exceptions.translate_fault(task_info.error) [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] Faults: ['InvalidArgument'] [ 1183.965557] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] [ 1183.967318] env[61356]: INFO nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Terminating instance [ 1183.967614] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.967819] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.968806] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: 
d1887728-4538-4639-a3e7-db235a2d4765] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1183.969032] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1183.969296] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83bc3c17-d854-4ce8-8df5-98ee8e6c2863 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.972063] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a4de17-4c18-467a-81a7-0942ee7b7cb4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.979048] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1183.979289] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20df266d-5001-4356-80ba-9a113c5797a0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.981725] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.981853] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1183.982886] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a82844c3-f0ff-4695-88eb-9858393cc3e0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.987920] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 1183.987920] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5293f07d-234b-5ccb-a2a6-bee3aa71a754" [ 1183.987920] env[61356]: _type = "Task" [ 1183.987920] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.995609] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5293f07d-234b-5ccb-a2a6-bee3aa71a754, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.052472] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1184.052774] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1184.052998] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Deleting the datastore file [datastore2] d1887728-4538-4639-a3e7-db235a2d4765 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.053307] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dbc27d3-f3c0-4578-9525-28a75807e41b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.060149] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Waiting for the task: (returnval){ [ 1184.060149] env[61356]: value = "task-4302576" [ 1184.060149] env[61356]: _type = "Task" [ 1184.060149] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.067751] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Task: {'id': task-4302576, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.500206] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1184.500206] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating directory with path [datastore2] vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.500206] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07dca811-070d-47b6-8845-e40bc08bcc96 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.511570] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created directory with path [datastore2] vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.511774] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Fetch image to [datastore2] vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1184.511946] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1184.512701] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46eb77d-15cd-4a95-8fd3-6746c05353da {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.520225] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf627f25-df16-4d12-b69c-052316bad8a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.529357] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fe48fb-5df8-4eb8-8f4d-ea8f453dd9bc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.563141] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12c830d-16f3-4473-87db-5a8dda609755 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.571588] env[61356]: DEBUG oslo_vmware.api [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Task: {'id': task-4302576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066349} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.571971] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1184.572133] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1184.572357] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1184.572569] env[61356]: INFO nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1184.574430] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-09de9513-cb8e-47bf-80a4-d0f17bc17209 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.576492] env[61356]: DEBUG nova.compute.claims [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1184.576678] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.576925] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.604355] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1184.801897] env[61356]: DEBUG oslo_vmware.rw_handles [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1184.869098] env[61356]: DEBUG oslo_vmware.rw_handles [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1184.869413] env[61356]: DEBUG oslo_vmware.rw_handles [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1185.081845] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede9394c-2127-47d7-8b04-3a2e6a2a2c23 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.089816] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca7613d-6ab0-4d28-b8cb-33a299884d33 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.120739] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3d6d44-84d3-42f1-beb5-7f0cb91cac82 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.128140] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ef1235-1d5b-4aa5-b3bf-e65c02b1ad9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.141323] env[61356]: DEBUG nova.compute.provider_tree [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.154350] env[61356]: DEBUG nova.scheduler.client.report [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1185.170025] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.593s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.170795] env[61356]: ERROR nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1185.170795] env[61356]: Faults: ['InvalidArgument'] [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] Traceback (most recent call last): [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1185.170795] env[61356]: ERROR nova.compute.manager 
[instance: d1887728-4538-4639-a3e7-db235a2d4765] self.driver.spawn(context, instance, image_meta, [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self._fetch_image_if_missing(context, vi) [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] image_cache(vi, tmp_image_ds_loc) [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] vm_util.copy_virtual_disk( [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] session._wait_for_task(vmdk_copy_task) [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] return self.wait_for_task(task_ref) [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] return evt.wait() [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] result = hub.switch() [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] return self.greenlet.switch() [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] self.f(*self.args, **self.kw) [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] raise exceptions.translate_fault(task_info.error) [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] Faults: ['InvalidArgument'] [ 1185.170795] env[61356]: ERROR nova.compute.manager [instance: d1887728-4538-4639-a3e7-db235a2d4765] [ 1185.171779] env[61356]: DEBUG nova.compute.utils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1185.173062] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Build of instance d1887728-4538-4639-a3e7-db235a2d4765 was re-scheduled: A specified parameter was not correct: fileType [ 1185.173062] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1185.173446] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1185.173622] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1185.173794] env[61356]: DEBUG nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1185.173955] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1185.572811] env[61356]: DEBUG nova.network.neutron [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.593775] env[61356]: INFO nova.compute.manager [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Took 0.42 seconds to deallocate network for instance. [ 1185.710600] env[61356]: INFO nova.scheduler.client.report [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Deleted allocations for instance d1887728-4538-4639-a3e7-db235a2d4765 [ 1185.757879] env[61356]: DEBUG oslo_concurrency.lockutils [None req-0bfd0d3f-0828-41dd-be37-90a4cac16248 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "d1887728-4538-4639-a3e7-db235a2d4765" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 600.363s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.759215] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "d1887728-4538-4639-a3e7-db235a2d4765" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 400.313s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.763021] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Acquiring lock "d1887728-4538-4639-a3e7-db235a2d4765-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.763021] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "d1887728-4538-4639-a3e7-db235a2d4765-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.763021] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "d1887728-4538-4639-a3e7-db235a2d4765-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.763021] env[61356]: INFO nova.compute.manager [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Terminating instance [ 1185.763806] env[61356]: DEBUG nova.compute.manager [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1185.763995] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1185.764514] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0140ee7e-dd16-41d9-b791-41c206be80a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.773814] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4603a21f-913d-4071-9940-cd8a74ab7bf1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.786710] env[61356]: DEBUG nova.compute.manager [None req-80bc413d-f36b-4f2b-9f7f-f5f2d18e31fd tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 2e4a6bd2-64d8-4c48-81c6-b3c749e4c270] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1185.804202] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d1887728-4538-4639-a3e7-db235a2d4765 could not be found. [ 1185.804406] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1185.804581] env[61356]: INFO nova.compute.manager [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1185.804829] env[61356]: DEBUG oslo.service.loopingcall [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1185.805072] env[61356]: DEBUG nova.compute.manager [-] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1185.807249] env[61356]: DEBUG nova.network.neutron [-] [instance: d1887728-4538-4639-a3e7-db235a2d4765] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1185.815374] env[61356]: DEBUG nova.compute.manager [None req-80bc413d-f36b-4f2b-9f7f-f5f2d18e31fd tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 2e4a6bd2-64d8-4c48-81c6-b3c749e4c270] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1185.834816] env[61356]: DEBUG nova.network.neutron [-] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.844312] env[61356]: DEBUG oslo_concurrency.lockutils [None req-80bc413d-f36b-4f2b-9f7f-f5f2d18e31fd tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "2e4a6bd2-64d8-4c48-81c6-b3c749e4c270" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 220.198s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.855651] env[61356]: DEBUG nova.compute.manager [None req-ac262828-21f1-48c8-92e2-7aa69934fad6 tempest-InstanceActionsV221TestJSON-1486762965 tempest-InstanceActionsV221TestJSON-1486762965-project-member] [instance: 2186aafa-5fd5-4b61-84f1-a694e20724dd] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1185.877880] env[61356]: INFO nova.compute.manager [-] [instance: d1887728-4538-4639-a3e7-db235a2d4765] Took 0.07 seconds to deallocate network for instance. [ 1185.888926] env[61356]: DEBUG nova.compute.manager [None req-ac262828-21f1-48c8-92e2-7aa69934fad6 tempest-InstanceActionsV221TestJSON-1486762965 tempest-InstanceActionsV221TestJSON-1486762965-project-member] [instance: 2186aafa-5fd5-4b61-84f1-a694e20724dd] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1185.920248] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ac262828-21f1-48c8-92e2-7aa69934fad6 tempest-InstanceActionsV221TestJSON-1486762965 tempest-InstanceActionsV221TestJSON-1486762965-project-member] Lock "2186aafa-5fd5-4b61-84f1-a694e20724dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.825s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.966451] env[61356]: DEBUG nova.compute.manager [None req-d04e7ed3-b17c-44cd-8d64-91eab9b77bad tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] [instance: 90476778-373e-430f-ad56-b9e7e4d0540b] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1185.998271] env[61356]: DEBUG nova.compute.manager [None req-d04e7ed3-b17c-44cd-8d64-91eab9b77bad tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] [instance: 90476778-373e-430f-ad56-b9e7e4d0540b] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1186.002274] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7c41b69e-37bb-49b7-860b-a620f1b145f8 tempest-ServerGroupTestJSON-1183880177 tempest-ServerGroupTestJSON-1183880177-project-member] Lock "d1887728-4538-4639-a3e7-db235a2d4765" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.243s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.003612] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "d1887728-4538-4639-a3e7-db235a2d4765" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 66.964s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.004082] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d1887728-4538-4639-a3e7-db235a2d4765] During sync_power_state the instance has a pending task (deleting). Skip. [ 1186.004223] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "d1887728-4538-4639-a3e7-db235a2d4765" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.038886] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d04e7ed3-b17c-44cd-8d64-91eab9b77bad tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] Lock "90476778-373e-430f-ad56-b9e7e4d0540b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.347s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.051752] env[61356]: DEBUG nova.compute.manager [None req-cab94cc8-f2f9-45fa-a28c-bb2bddde79e0 tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] [instance: c4aa0d72-8ad1-4a1f-9a5f-279baca944cb] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1186.083355] env[61356]: DEBUG nova.compute.manager [None req-cab94cc8-f2f9-45fa-a28c-bb2bddde79e0 tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] [instance: c4aa0d72-8ad1-4a1f-9a5f-279baca944cb] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1186.118532] env[61356]: DEBUG oslo_concurrency.lockutils [None req-cab94cc8-f2f9-45fa-a28c-bb2bddde79e0 tempest-ServersAdminTestJSON-215682310 tempest-ServersAdminTestJSON-215682310-project-member] Lock "c4aa0d72-8ad1-4a1f-9a5f-279baca944cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.279s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.132291] env[61356]: DEBUG nova.compute.manager [None req-f8fe403b-adac-4181-8970-589c2c40f127 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9f0a73d0-c97d-4fa6-b00f-50f14763ad9b] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1186.167190] env[61356]: DEBUG nova.compute.manager [None req-f8fe403b-adac-4181-8970-589c2c40f127 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9f0a73d0-c97d-4fa6-b00f-50f14763ad9b] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1186.205054] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f8fe403b-adac-4181-8970-589c2c40f127 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9f0a73d0-c97d-4fa6-b00f-50f14763ad9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.816s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.218720] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1186.298047] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.298385] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.300119] env[61356]: INFO nova.compute.claims [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1186.764037] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95b6951-9813-4dd7-93cb-9a195ac4bcf5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.772122] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2278d5c-4000-4b22-b8b3-024df6e696cb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.803467] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4307f913-3a33-4844-9250-0eb1922cbed4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.811399] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43a702b-b102-4010-8951-7b4fbca71263 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.827169] env[61356]: DEBUG nova.compute.provider_tree [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.838139] env[61356]: DEBUG nova.scheduler.client.report [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1186.855412] 
env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.557s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1186.855967] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1186.893707] env[61356]: DEBUG nova.compute.utils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1186.894987] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1186.895176] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1186.909774] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1186.994832] env[61356]: DEBUG nova.policy [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af8a8b090150418198025e06b341a90f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec911d8f85794d1ba508e671e75308e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1187.051299] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1187.090441] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1187.090713] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1187.090878] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1187.091078] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1187.091233] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1187.091373] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1187.091580] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1187.091740] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1187.091906] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1187.092079] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1187.092258] env[61356]: DEBUG nova.virt.hardware [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1187.093470] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9628c3ea-d243-4684-8844-18fd6c9bb7ba {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.102720] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8111b5bb-d816-4d07-bf84-172dd806be5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.464521] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Successfully created port: eddaac20-0993-42a5-afbd-43d9ec8c88af {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1188.175530] env[61356]: DEBUG nova.compute.manager [req-543ea72d-072d-4fb9-9ef5-593404aebdf2 req-c4a1aaa8-bb73-434c-bd59-4bdae2c1a5fc service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Received event network-vif-plugged-eddaac20-0993-42a5-afbd-43d9ec8c88af {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1188.175815] env[61356]: DEBUG oslo_concurrency.lockutils [req-543ea72d-072d-4fb9-9ef5-593404aebdf2 req-c4a1aaa8-bb73-434c-bd59-4bdae2c1a5fc service nova] Acquiring lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.175996] env[61356]: DEBUG oslo_concurrency.lockutils [req-543ea72d-072d-4fb9-9ef5-593404aebdf2 req-c4a1aaa8-bb73-434c-bd59-4bdae2c1a5fc service nova] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1188.176274] env[61356]: DEBUG oslo_concurrency.lockutils [req-543ea72d-072d-4fb9-9ef5-593404aebdf2 req-c4a1aaa8-bb73-434c-bd59-4bdae2c1a5fc service nova] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.176362] env[61356]: DEBUG nova.compute.manager [req-543ea72d-072d-4fb9-9ef5-593404aebdf2 req-c4a1aaa8-bb73-434c-bd59-4bdae2c1a5fc service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] No waiting events found dispatching network-vif-plugged-eddaac20-0993-42a5-afbd-43d9ec8c88af {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1188.176529] env[61356]: WARNING nova.compute.manager [req-543ea72d-072d-4fb9-9ef5-593404aebdf2 req-c4a1aaa8-bb73-434c-bd59-4bdae2c1a5fc service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Received unexpected event network-vif-plugged-eddaac20-0993-42a5-afbd-43d9ec8c88af for instance with vm_state building and task_state spawning. [ 1188.294593] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Successfully updated port: eddaac20-0993-42a5-afbd-43d9ec8c88af {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1188.307562] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "refresh_cache-c22e625c-3fd2-4280-9cc7-bb7e08d700d5" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.307808] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquired lock "refresh_cache-c22e625c-3fd2-4280-9cc7-bb7e08d700d5" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.307886] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1188.355466] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1188.539960] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Updating instance_info_cache with network_info: [{"id": "eddaac20-0993-42a5-afbd-43d9ec8c88af", "address": "fa:16:3e:bf:a7:9b", "network": {"id": "707b8118-9e44-468e-ac97-901a78a590e2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1812931710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec911d8f85794d1ba508e671e75308e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeddaac20-09", "ovs_interfaceid": "eddaac20-0993-42a5-afbd-43d9ec8c88af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.554920] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Releasing lock "refresh_cache-c22e625c-3fd2-4280-9cc7-bb7e08d700d5" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.555240] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance network_info: |[{"id": "eddaac20-0993-42a5-afbd-43d9ec8c88af", "address": "fa:16:3e:bf:a7:9b", "network": {"id": "707b8118-9e44-468e-ac97-901a78a590e2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1812931710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec911d8f85794d1ba508e671e75308e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeddaac20-09", "ovs_interfaceid": "eddaac20-0993-42a5-afbd-43d9ec8c88af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1188.555667] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:a7:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eddaac20-0993-42a5-afbd-43d9ec8c88af', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1188.564687] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Creating folder: Project (ec911d8f85794d1ba508e671e75308e6). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1188.565677] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48414edf-318f-4d1d-9b85-62d499e85c62 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.576008] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Created folder: Project (ec911d8f85794d1ba508e671e75308e6) in parent group-v846926. [ 1188.576252] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Creating folder: Instances. Parent ref: group-v846997. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1188.576487] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f19f2113-acb8-4ba8-ba99-1763cf2a887a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.585934] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Created folder: Instances in parent group-v846997. [ 1188.586202] env[61356]: DEBUG oslo.service.loopingcall [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1188.586390] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1188.586584] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-04c8c5be-4e60-4b7e-b8fe-dafff0d30b6a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.605978] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1188.605978] env[61356]: value = "task-4302579" [ 1188.605978] env[61356]: _type = "Task" [ 1188.605978] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.613538] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302579, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.116434] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302579, 'name': CreateVM_Task} progress is 25%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.617658] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302579, 'name': CreateVM_Task, 'duration_secs': 0.770963} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.617658] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1189.628483] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.628674] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.629014] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1189.629362] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2635a141-82df-416b-a34a-ee610208f3a2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.634078] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] 
Waiting for the task: (returnval){ [ 1189.634078] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]524ec4f5-4508-e7e6-6fa7-72f6c6e6251a" [ 1189.634078] env[61356]: _type = "Task" [ 1189.634078] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.642664] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]524ec4f5-4508-e7e6-6fa7-72f6c6e6251a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.148366] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.148640] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1190.148923] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.225068] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.271174] env[61356]: DEBUG nova.compute.manager [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Received event network-changed-eddaac20-0993-42a5-afbd-43d9ec8c88af {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1190.271174] env[61356]: DEBUG nova.compute.manager [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Refreshing instance network info cache due to event network-changed-eddaac20-0993-42a5-afbd-43d9ec8c88af. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1190.271174] env[61356]: DEBUG oslo_concurrency.lockutils [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] Acquiring lock "refresh_cache-c22e625c-3fd2-4280-9cc7-bb7e08d700d5" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1190.271174] env[61356]: DEBUG oslo_concurrency.lockutils [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] Acquired lock "refresh_cache-c22e625c-3fd2-4280-9cc7-bb7e08d700d5" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.271174] env[61356]: DEBUG nova.network.neutron [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Refreshing network info cache for port eddaac20-0993-42a5-afbd-43d9ec8c88af {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1190.906743] env[61356]: DEBUG nova.network.neutron [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Updated VIF entry in instance network info cache for port eddaac20-0993-42a5-afbd-43d9ec8c88af. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1190.906743] env[61356]: DEBUG nova.network.neutron [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Updating instance_info_cache with network_info: [{"id": "eddaac20-0993-42a5-afbd-43d9ec8c88af", "address": "fa:16:3e:bf:a7:9b", "network": {"id": "707b8118-9e44-468e-ac97-901a78a590e2", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1812931710-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec911d8f85794d1ba508e671e75308e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeddaac20-09", "ovs_interfaceid": "eddaac20-0993-42a5-afbd-43d9ec8c88af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.916434] env[61356]: DEBUG oslo_concurrency.lockutils [req-50167f37-f689-4bc0-8803-3635dc419a0f req-3b029916-7801-4ef2-bb7b-b7ec6d26b649 service nova] Releasing lock "refresh_cache-c22e625c-3fd2-4280-9cc7-bb7e08d700d5" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1193.266977] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 
tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.267480] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.446282] env[61356]: DEBUG oslo_concurrency.lockutils [None req-46d4c536-c4cb-4ba6-89e3-934787724c63 tempest-ServerShowV257Test-376094274 tempest-ServerShowV257Test-376094274-project-member] Acquiring lock "b23c7c3d-86d9-4b3c-b121-099833985d7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.446563] env[61356]: DEBUG oslo_concurrency.lockutils [None req-46d4c536-c4cb-4ba6-89e3-934787724c63 tempest-ServerShowV257Test-376094274 tempest-ServerShowV257Test-376094274-project-member] Lock "b23c7c3d-86d9-4b3c-b121-099833985d7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.283642] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ef8f33af-6ae8-4bac-8cc4-f14f9a8b10ef tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "15e801d1-823a-4a9c-876a-c60fa6c686f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.283912] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ef8f33af-6ae8-4bac-8cc4-f14f9a8b10ef tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "15e801d1-823a-4a9c-876a-c60fa6c686f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.720714] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ea2cdbb9-7937-400a-a46a-b0c52678ce3d tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] Acquiring lock "f7170bb5-0399-44d5-adf0-6eca96e5f7b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.721047] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ea2cdbb9-7937-400a-a46a-b0c52678ce3d tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] Lock "f7170bb5-0399-44d5-adf0-6eca96e5f7b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.758649] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.757618] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.769511] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.769814] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.769941] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.770111] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1220.771734] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eac7457-e683-4595-b2d5-020e11a4ef95 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.781202] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6112698b-1202-4f01-9a53-94e83e4adb15 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.795285] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420883bc-9e6a-4a5d-80f8-b1df3965b4ec {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.802101] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a914f8-6bed-4bf2-92a6-916511b5bfb1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.833743] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180686MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1220.833971] env[61356]: DEBUG 
oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.834272] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1df51a6d-1303-4c89-b61e-95070acf0772 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.922893] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1220.933328] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1220.945038] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1220.956347] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1fe3a0c0-6d61-450a-8754-ce82f6b24661 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1220.966540] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance da968e6f-ef0a-41b5-b4a5-d11c096ad018 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1220.976681] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1220.989536] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1220.998937] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bca58b74-2a67-4943-b6f3-52dec9227ceb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.008384] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bec41d8f-c7c9-4c23-af43-0ef4816f631e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.018095] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d512abed-9776-449c-9de0-00925d888964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.029378] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1c4d0b51-253c-4f0f-8c83-b341900afd28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.039442] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.050794] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b23c7c3d-86d9-4b3c-b121-099833985d7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.060839] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 15e801d1-823a-4a9c-876a-c60fa6c686f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.071199] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7170bb5-0399-44d5-adf0-6eca96e5f7b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1221.071438] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1221.071585] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1221.392839] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34788256-e211-4407-ac3e-8a05eeb36f82 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.401802] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db992ba7-f1c8-406a-897a-8fd7c1fb7322 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.431864] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9587d798-9f19-4bcd-a5f4-a4b54c535634 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.439986] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac18182e-0951-4255-95d1-76cfd07cf073 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.452500] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: 
cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1221.461347] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1221.474715] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1221.474900] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.641s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.470652] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.470963] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.759063] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.759063] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.759063] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.754727] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.776014] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.776176] 
env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1229.759170] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.759510] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1229.759510] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1229.780122] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.780276] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.780364] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.780491] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.780612] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.780730] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.780863] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.781080] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.781263] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.781400] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1229.781523] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1230.598142] env[61356]: WARNING oslo_vmware.rw_handles [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1230.598142] env[61356]: ERROR oslo_vmware.rw_handles [ 1230.598683] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1230.600477] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1230.600729] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 
tempest-DeleteServersTestJSON-1040658938-project-member] Copying Virtual Disk [datastore2] vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/704a12df-5e24-4010-b4bf-c2e861cb750c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1230.601014] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c43f884f-03e3-4995-9713-3f24d4e58d60 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.610900] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 1230.610900] env[61356]: value = "task-4302580" [ 1230.610900] env[61356]: _type = "Task" [ 1230.610900] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.618777] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.120761] env[61356]: DEBUG oslo_vmware.exceptions [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1231.121149] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.121628] env[61356]: ERROR nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1231.121628] env[61356]: Faults: ['InvalidArgument'] [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Traceback (most recent call last): [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] yield resources [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self.driver.spawn(context, instance, image_meta, [ 1231.121628] env[61356]: ERROR 
nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self._fetch_image_if_missing(context, vi) [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] image_cache(vi, tmp_image_ds_loc) [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] vm_util.copy_virtual_disk( [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] session._wait_for_task(vmdk_copy_task) [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] return self.wait_for_task(task_ref) [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] return evt.wait() [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] result = hub.switch() [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] return self.greenlet.switch() [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self.f(*self.args, **self.kw) [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 
2fb236e9-1c31-4bb9-91e8-cbe367e0a989] raise exceptions.translate_fault(task_info.error) [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Faults: ['InvalidArgument'] [ 1231.121628] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] [ 1231.122751] env[61356]: INFO nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Terminating instance [ 1231.123537] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.123754] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.123989] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-175d5fc5-b4dd-4433-a7a7-c32d4fd30652 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.127084] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1231.127281] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1231.127984] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba23b26b-f611-408b-bf9a-aa52ea0feebe {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.134667] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1231.134886] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58d10363-ea62-4b50-8dac-4c8bf749abe8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.137057] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.137236] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1231.138140] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4fdac2-d4a0-4d3c-a08b-a985baa46bbf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.142764] env[61356]: DEBUG oslo_vmware.api [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Waiting for the task: (returnval){ [ 1231.142764] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52170530-61d5-39e6-5e77-96af8ed51d07" [ 1231.142764] env[61356]: _type = "Task" [ 1231.142764] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.156785] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1231.157011] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Creating directory with path [datastore2] vmware_temp/84158aaf-c9e4-462d-89e7-716ba44d8e78/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.157219] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b626a79-ef79-4d8a-a064-78922d84bfa1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.167527] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Created directory with path [datastore2] vmware_temp/84158aaf-c9e4-462d-89e7-716ba44d8e78/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.167749] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Fetch image to [datastore2] vmware_temp/84158aaf-c9e4-462d-89e7-716ba44d8e78/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1231.167927] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/84158aaf-c9e4-462d-89e7-716ba44d8e78/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1231.168624] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d45d2c-fbe5-4f90-a0ad-a0de8104ac24 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.174934] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8eb5ae-e5ec-4aba-b954-53f35a32fbf8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.183605] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ade6d5-137b-430c-8edc-f43dc4147325 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.214479] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8144ae1-55f0-48ff-a168-228efd2d046e 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.216962] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1231.217172] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1231.217350] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleting the datastore file [datastore2] 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1231.217578] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be97adc9-c74a-43b6-907f-8f45e6bbb9eb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.223247] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f0b865ef-2585-4143-853b-4bd932314592 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.224919] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 1231.224919] env[61356]: value = "task-4302582" [ 1231.224919] env[61356]: _type = "Task" [ 1231.224919] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.233597] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302582, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.246800] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1231.456273] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.458044] env[61356]: ERROR nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Traceback (most recent call last): [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = getattr(controller, method)(*args, **kwargs) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._get(image_id) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] resp, body = self.http_client.get(url, headers=header) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.request(url, 'GET', **kwargs) [ 
1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._handle_response(resp) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise exc.from_response(resp, resp.content) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] During handling of the above exception, another exception occurred: [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Traceback (most recent call last): [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] yield resources [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self.driver.spawn(context, instance, image_meta, [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._fetch_image_if_missing(context, vi) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] image_fetch(context, vi, tmp_image_ds_loc) [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 
1df51a6d-1303-4c89-b61e-95070acf0772] images.fetch_image( [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1231.458044] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] metadata = IMAGE_API.get(context, image_ref) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return session.show(context, image_id, [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] _reraise_translated_image_exception(image_id) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise new_exc.with_traceback(exc_trace) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = getattr(controller, method)(*args, **kwargs) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._get(image_id) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] resp, body = self.http_client.get(url, headers=header) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.request(url, 'GET', **kwargs) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._handle_response(resp) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise exc.from_response(resp, resp.content) [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1231.459594] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1231.459594] env[61356]: INFO nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Terminating instance [ 1231.460939] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.461012] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.461360] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-586aff6b-a888-4900-9d04-39854bbd3865 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.465140] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.465309] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquired lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.465540] env[61356]: DEBUG nova.network.neutron [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1231.478392] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 
tempest-AttachInterfacesTestJSON-473518452-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.478569] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1231.479624] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b79c217c-ef29-4222-931e-5021a4c95140 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.485037] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Waiting for the task: (returnval){ [ 1231.485037] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527ce3a4-8a7e-051c-e7da-e9bc845517d4" [ 1231.485037] env[61356]: _type = "Task" [ 1231.485037] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.493247] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527ce3a4-8a7e-051c-e7da-e9bc845517d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.496320] env[61356]: DEBUG nova.network.neutron [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1231.566520] env[61356]: DEBUG nova.network.neutron [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1231.576030] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Releasing lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1231.576424] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1231.576622] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1231.577730] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dda5607-c738-48f7-9bac-e8c57f022ac1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.585878] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1231.586134] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2fcd5e8-4e51-4013-b2c6-58961bef88f7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.615338] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1231.615589] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1231.615773] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Deleting the datastore file [datastore2] 1df51a6d-1303-4c89-b61e-95070acf0772 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1231.616038] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdcf2172-9b87-4c47-98bc-d82c233b35bf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.621760] env[61356]: DEBUG oslo_vmware.api [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Waiting for the task: (returnval){ [ 1231.621760] env[61356]: value = "task-4302584" [ 1231.621760] env[61356]: _type = "Task" [ 1231.621760] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.629270] env[61356]: DEBUG oslo_vmware.api [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Task: {'id': task-4302584, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.734686] env[61356]: DEBUG oslo_vmware.api [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072446} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.734686] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1231.734853] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1231.734936] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1231.735118] env[61356]: INFO nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Took 0.61 seconds to destroy the instance on the hypervisor. 
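[editor's note] The DeleteDatastoreFile_Task records above (task-4302582 completing, task-4302584 being polled) follow oslo.vmware's invoke-then-poll pattern: the vim call returns a Task managed-object reference immediately and wait_for_task polls it until vCenter reports success or a fault. A minimal sketch of that pattern, with placeholder endpoint, credentials and datastore path (none taken from this log):

    from oslo_vmware import api

    # Hypothetical vCenter endpoint/credentials; nova builds the equivalent
    # session in VMwareAPISession._create_session (see the session records
    # at the top of this log).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # FileManager.DeleteDatastoreFile_Task returns a Task moref right away;
    # the deletion itself runs asynchronously on the vCenter side.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name='[datastore2] 1df51a6d-1303-4c89-b61e-95070acf0772',
                              datacenter=None)  # nova passes the real Datacenter moref here

    # wait_for_task drives the "Task: {...} progress is 0%" polling seen in
    # the records above and raises a translated fault if the task errors out.
    session.wait_for_task(task)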
[ 1231.738638] env[61356]: DEBUG nova.compute.claims [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1231.738638] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.738638] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.996979] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1231.997254] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Creating directory with path [datastore2] vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.997484] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-725a27bc-7e17-4382-9c93-3b99797007bb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.021030] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Created directory with path [datastore2] vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.021787] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Fetch image to [datastore2] vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1232.021787] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the 
data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1232.022280] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7619b29c-9d92-42a4-9646-2cf7bfa0edbe {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.029630] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878eaeaa-6b2d-4e71-9f4d-184333b95d29 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.040451] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d52c87-9f20-4359-88e8-a74c32f0814d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.074897] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5862a3be-aa96-4c16-a795-5648e01c566e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.080467] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-596719c4-8fea-4db6-a8ca-1290c3b50dae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.102963] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1232.133493] env[61356]: DEBUG oslo_vmware.api [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Task: {'id': task-4302584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040672} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.135615] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1232.135806] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1232.135979] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1232.136167] env[61356]: INFO nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1232.136395] env[61356]: DEBUG oslo.service.loopingcall [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1232.136802] env[61356]: DEBUG nova.compute.manager [-] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network deallocation for instance since networking was not requested. {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1232.138915] env[61356]: DEBUG nova.compute.claims [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1232.139097] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.154080] env[61356]: DEBUG oslo_vmware.rw_handles [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
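[editor's note] The rw_handles record above opens a write handle against the ESX host's /folder endpoint; under the hood the upload is an ordinary HTTPS PUT to the datastore path, authenticated with the vCenter session cookie or the generic service ticket acquired a few records earlier (SessionManager.AcquireGenericServiceTicket). A rough, hedged sketch of the same transfer using requests; the URL, ticket value and local file are illustrative placeholders, and the cookie name is an assumption based on how oslo.vmware passes service tickets:

    import requests

    # Placeholder URL in the same shape as the one logged above.
    url = ('https://esx.example.test:443/folder/vmware_temp/tmp-sparse.vmdk'
           '?dcPath=ha-datacenter&dsName=datastore2')

    with open('tmp-sparse.vmdk', 'rb') as src:
        # Stream the image bytes to the datastore; verify=False only mirrors
        # a lab setup with self-signed certificates.
        resp = requests.put(url, data=src,
                            cookies={'vmware_cgi_ticket': 'TICKET'},
                            verify=False)
        resp.raise_for_status()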
[ 1232.208716] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd186e0c-19ad-40c3-abdf-6aca23ffd1d4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.213457] env[61356]: DEBUG oslo_vmware.rw_handles [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1232.213623] env[61356]: DEBUG oslo_vmware.rw_handles [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1232.216720] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f40b3c5-2561-4a46-96e0-d6c9cf25a2c2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.245254] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e06a00-3db2-44ea-9f55-c2725c1125d8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.251822] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffcbcec-d86f-48f6-86d0-fecb8d6c947b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.264094] env[61356]: DEBUG nova.compute.provider_tree [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.275775] env[61356]: DEBUG nova.scheduler.client.report [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1232.290251] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held
0.552s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.290772] env[61356]: ERROR nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1232.290772] env[61356]: Faults: ['InvalidArgument'] [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Traceback (most recent call last): [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self.driver.spawn(context, instance, image_meta, [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self._fetch_image_if_missing(context, vi) [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] image_cache(vi, tmp_image_ds_loc) [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] vm_util.copy_virtual_disk( [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] session._wait_for_task(vmdk_copy_task) [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] return self.wait_for_task(task_ref) [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] return evt.wait() [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1232.290772] env[61356]: ERROR 
nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] result = hub.switch() [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] return self.greenlet.switch() [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] self.f(*self.args, **self.kw) [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] raise exceptions.translate_fault(task_info.error) [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Faults: ['InvalidArgument'] [ 1232.290772] env[61356]: ERROR nova.compute.manager [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] [ 1232.291940] env[61356]: DEBUG nova.compute.utils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1232.292569] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.153s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.298017] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Build of instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 was re-scheduled: A specified parameter was not correct: fileType [ 1232.298017] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1232.298017] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1232.298017] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1232.298017] env[61356]: DEBUG nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1232.298017] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1232.630513] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296df294-9c7d-4768-966e-68d2e472acce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.643026] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048c9970-c852-451f-b2d1-b2e99e9f6663 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.679317] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c97b3e-63dd-46cc-ba44-745ffc7227c7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.682237] env[61356]: DEBUG nova.network.neutron [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.689179] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fceaaea-050c-45e0-a7a9-775185946de1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.703724] env[61356]: DEBUG nova.compute.provider_tree [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.705431] env[61356]: INFO nova.compute.manager [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Took 0.41 seconds to deallocate network for instance. 
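[editor's note] The repeated "Inventory has not changed" records on either side of this point carry the provider's full inventory dict. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, so the inventory logged here advertises 192 VCPU, 196078 MB of RAM and 210 GB of disk. A quick check of that arithmetic against the logged values:

    # Capacity check for provider cfb833eb-84f8-480d-866a-1ad2fb871484,
    # using the inventory data from the records above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)  # VCPU 192, MEMORY_MB 196078, DISK_GB 210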
[ 1232.714085] env[61356]: DEBUG nova.scheduler.client.report [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1232.727567] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.435s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.728348] env[61356]: ERROR nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Traceback (most recent call last): [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = getattr(controller, method)(*args, **kwargs) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._get(image_id) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] resp, body = self.http_client.get(url, headers=header) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.request(url, 'GET', **kwargs) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._handle_response(resp) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise exc.from_response(resp, resp.content) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] During handling of the above exception, another exception occurred: [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Traceback (most recent call last): [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self.driver.spawn(context, instance, image_meta, [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._fetch_image_if_missing(context, vi) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] image_fetch(context, vi, tmp_image_ds_loc) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] images.fetch_image( [ 1232.728348] env[61356]: ERROR 
nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] metadata = IMAGE_API.get(context, image_ref) [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1232.728348] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return session.show(context, image_id, [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] _reraise_translated_image_exception(image_id) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise new_exc.with_traceback(exc_trace) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = getattr(controller, method)(*args, **kwargs) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._get(image_id) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] resp, body = self.http_client.get(url, headers=header) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.request(url, 'GET', **kwargs) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1232.730238] 
env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self._handle_response(resp) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise exc.from_response(resp, resp.content) [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1232.730238] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1232.730238] env[61356]: DEBUG nova.compute.utils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1232.731937] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Build of instance 1df51a6d-1303-4c89-b61e-95070acf0772 was re-scheduled: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1232.731937] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1232.731937] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.731937] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquired lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.732139] env[61356]: DEBUG nova.network.neutron [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1232.769898] env[61356]: DEBUG nova.network.neutron [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1232.815265] env[61356]: INFO nova.scheduler.client.report [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleted allocations for instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 [ 1232.833808] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd0ad7aa-1dc2-4402-9a34-912c81befad0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 644.715s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.834986] env[61356]: DEBUG oslo_concurrency.lockutils [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 446.551s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.835258] env[61356]: DEBUG oslo_concurrency.lockutils [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1232.835466] env[61356]: DEBUG oslo_concurrency.lockutils [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1232.835636] env[61356]: DEBUG oslo_concurrency.lockutils [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.837649] env[61356]: INFO nova.compute.manager [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Terminating instance [ 1232.839565] env[61356]: DEBUG nova.compute.manager [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1232.839704] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1232.840529] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1409de04-b7ca-406f-bdfe-2f6a6e0a256a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.850250] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25489a35-c566-4979-b4e2-77003a488381 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.861339] env[61356]: DEBUG nova.compute.manager [None req-b8b9b7a7-de83-4ba1-a64b-c7a4cdf05042 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: ce5f2517-fbf8-4ecc-b6f5-24c56238786b] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1232.867241] env[61356]: DEBUG nova.network.neutron [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.882651] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2fb236e9-1c31-4bb9-91e8-cbe367e0a989 could not be found. [ 1232.883278] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1232.883278] env[61356]: INFO nova.compute.manager [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1232.883278] env[61356]: DEBUG oslo.service.loopingcall [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
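[editor's note] The oslo.service record just above ("Waiting for function ... _deallocate_network_with_retries to return.") comes from a looping-call primitive: the wrapped callback is invoked on a timer and stops the loop by raising LoopingCallDone, whose retvalue becomes the result of wait(). A minimal sketch of that primitive using FixedIntervalLoopingCall; the callback body is an illustrative stand-in, not nova's actual retry logic:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _retrying_callback():
        # Stand-in for nova's _deallocate_network_with_retries; raising
        # LoopingCallDone ends the loop and surfaces retvalue via wait().
        attempts['n'] += 1
        if attempts['n'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_retrying_callback)
    # start() schedules the callback every 0.5s and returns an event; the
    # blocking wait() is what produces the "Waiting for function ... to
    # return." DEBUG line in the log.
    result = timer.start(interval=0.5).wait()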
[ 1232.884030] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Releasing lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.884030] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1232.884159] env[61356]: DEBUG nova.compute.manager [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Skipping network deallocation for instance since networking was not requested. {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1232.886414] env[61356]: DEBUG nova.compute.manager [-] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1232.886511] env[61356]: DEBUG nova.network.neutron [-] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1232.899128] env[61356]: DEBUG nova.compute.manager [None req-b8b9b7a7-de83-4ba1-a64b-c7a4cdf05042 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: ce5f2517-fbf8-4ecc-b6f5-24c56238786b] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1232.915995] env[61356]: DEBUG nova.network.neutron [-] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.925026] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b8b9b7a7-de83-4ba1-a64b-c7a4cdf05042 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "ce5f2517-fbf8-4ecc-b6f5-24c56238786b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 240.143s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.936999] env[61356]: INFO nova.compute.manager [-] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] Took 0.05 seconds to deallocate network for instance. [ 1232.944546] env[61356]: DEBUG nova.compute.manager [None req-3d8d6870-62c5-41e2-9145-b8ed903f3eed tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] [instance: 508170e0-afe4-4ff5-94cc-d7b093cfd7d4] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1232.967696] env[61356]: DEBUG nova.compute.manager [None req-3d8d6870-62c5-41e2-9145-b8ed903f3eed tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] [instance: 508170e0-afe4-4ff5-94cc-d7b093cfd7d4] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1233.006878] env[61356]: INFO nova.scheduler.client.report [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Deleted allocations for instance 1df51a6d-1303-4c89-b61e-95070acf0772 [ 1233.016589] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d8d6870-62c5-41e2-9145-b8ed903f3eed tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] Lock "508170e0-afe4-4ff5-94cc-d7b093cfd7d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.685s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.038762] env[61356]: DEBUG nova.compute.manager [None req-3002e83c-56a8-4f45-bc8a-b932af2b571f tempest-InstanceActionsNegativeTestJSON-1868473628 tempest-InstanceActionsNegativeTestJSON-1868473628-project-member] [instance: bd723957-6a15-4d23-88a4-c708778bee39] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1233.042965] env[61356]: DEBUG oslo_concurrency.lockutils [None req-656a5733-1396-4193-ace3-0644c5d6645c tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "1df51a6d-1303-4c89-b61e-95070acf0772" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 625.043s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.044345] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "1df51a6d-1303-4c89-b61e-95070acf0772" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 428.495s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.044567] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "1df51a6d-1303-4c89-b61e-95070acf0772-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.044926] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "1df51a6d-1303-4c89-b61e-95070acf0772-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.044926] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "1df51a6d-1303-4c89-b61e-95070acf0772-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.047643] env[61356]: INFO nova.compute.manager [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Terminating instance [ 1233.051026] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquiring lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.051307] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Acquired lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.051486] env[61356]: DEBUG nova.network.neutron [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1233.054868] env[61356]: DEBUG nova.compute.manager [None req-8dab85de-0ef6-453e-a081-8722062b2bf8 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] [instance: c63173e3-52d8-4412-a225-37b67187126a] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1233.079094] env[61356]: DEBUG nova.compute.manager [None req-3002e83c-56a8-4f45-bc8a-b932af2b571f tempest-InstanceActionsNegativeTestJSON-1868473628 tempest-InstanceActionsNegativeTestJSON-1868473628-project-member] [instance: bd723957-6a15-4d23-88a4-c708778bee39] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1233.089191] env[61356]: DEBUG nova.compute.manager [None req-8dab85de-0ef6-453e-a081-8722062b2bf8 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] [instance: c63173e3-52d8-4412-a225-37b67187126a] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1233.098837] env[61356]: DEBUG oslo_concurrency.lockutils [None req-580074a7-9062-40ab-8650-bb08f01d1dfc tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.264s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.099963] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 114.060s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.099963] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2fb236e9-1c31-4bb9-91e8-cbe367e0a989] During sync_power_state the instance has a pending task (deleting). Skip. [ 1233.100095] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "2fb236e9-1c31-4bb9-91e8-cbe367e0a989" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.100819] env[61356]: DEBUG nova.network.neutron [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1233.103313] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3002e83c-56a8-4f45-bc8a-b932af2b571f tempest-InstanceActionsNegativeTestJSON-1868473628 tempest-InstanceActionsNegativeTestJSON-1868473628-project-member] Lock "bd723957-6a15-4d23-88a4-c708778bee39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 220.085s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.113445] env[61356]: DEBUG nova.compute.manager [None req-54870c06-6207-4262-95ec-1b2d0fd2557f tempest-ServerShowV254Test-864364288 tempest-ServerShowV254Test-864364288-project-member] [instance: 03f22cd3-fea8-43be-a4db-f65521c832a6] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1233.116379] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8dab85de-0ef6-453e-a081-8722062b2bf8 tempest-AttachVolumeNegativeTest-941082563 tempest-AttachVolumeNegativeTest-941082563-project-member] Lock "c63173e3-52d8-4412-a225-37b67187126a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.098s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.127395] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1233.148436] env[61356]: DEBUG nova.compute.manager [None req-54870c06-6207-4262-95ec-1b2d0fd2557f tempest-ServerShowV254Test-864364288 tempest-ServerShowV254Test-864364288-project-member] [instance: 03f22cd3-fea8-43be-a4db-f65521c832a6] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1233.174190] env[61356]: DEBUG oslo_concurrency.lockutils [None req-54870c06-6207-4262-95ec-1b2d0fd2557f tempest-ServerShowV254Test-864364288 tempest-ServerShowV254Test-864364288-project-member] Lock "03f22cd3-fea8-43be-a4db-f65521c832a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.419s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.185740] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1233.194470] env[61356]: DEBUG nova.network.neutron [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.196325] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.196556] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.198086] env[61356]: INFO nova.compute.claims [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1233.204247] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Releasing lock "refresh_cache-1df51a6d-1303-4c89-b61e-95070acf0772" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.204247] env[61356]: DEBUG nova.compute.manager [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1233.204247] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1233.204247] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f4fa157-6451-4d6f-81c7-f2078e77f0c2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.212848] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac93b53-c0f3-4883-b32f-c22186d17f59 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.240047] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1df51a6d-1303-4c89-b61e-95070acf0772 could not be found. [ 1233.240253] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1233.240431] env[61356]: INFO nova.compute.manager [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1233.240665] env[61356]: DEBUG oslo.service.loopingcall [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1233.242811] env[61356]: DEBUG nova.compute.manager [-] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1233.242931] env[61356]: DEBUG nova.network.neutron [-] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1233.259768] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.372195] env[61356]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1233.372395] env[61356]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1233.374046] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-dadc2546-5ca5-4cbb-ac1c-f53f30a52978'] [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1233.374046] env[61356]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1233.374046] env[61356]: ERROR oslo.service.loopingcall [ 1233.375718] env[61356]: ERROR nova.compute.manager [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1233.407554] env[61356]: ERROR nova.compute.manager [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
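The traceback above, and the repetitions of it below from nova.compute.manager and oslo_messaging.rpc.server, all show the same failure chain: neutronclient raises Unauthorized on the 401 from the identity service, and the wrapper frames in /opt/stack/nova/nova/network/neutron.py (lines 196 and 212 in this run) translate it into NeutronAdminCredentialConfigurationInvalid so the compute manager reports a credential-configuration problem rather than a bare auth error. Below is a minimal sketch of that translate-and-reraise decorator pattern; every name is an illustrative stand-in except the two exception types, which appear verbatim in the log.

# Illustrative sketch of the exception translation visible in the
# "wrapper" frames above. Only the two exception names come from the
# log; everything else here is hypothetical.
import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class raised at neutron.py:212."""

def translate_neutron_exceptions(func):
    """Re-raise a client-level 401 as a configuration error."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized as exc:
            # A client built from admin credentials should never see a
            # 401, so treat it as bad [neutron] settings in nova.conf.
            raise NeutronAdminCredentialConfigurationInvalid() from exc
    return wrapper

@translate_neutron_exceptions
def list_ports(**search_opts):
    # Simulate the failing neutronclient call from the traceback.
    raise Unauthorized("401: The request you have made requires authentication.")

if __name__ == "__main__":
    try:
        list_ports(device_id="1df51a6d-1303-4c89-b61e-95070acf0772")
    except NeutronAdminCredentialConfigurationInvalid as exc:
        print(type(exc).__name__, "caused by", exc.__cause__)

The practical upshot in this run is what the surrounding entries record: the instance is set to vm_state ERROR and the RPC handler re-raises, while the underlying remedy is the one the ERROR nova.network.neutron line already names, verifying the Neutron admin credentials in nova.conf.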
[ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Traceback (most recent call last): [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] ret = obj(*args, **kwargs) [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] exception_handler_v20(status_code, error_body) [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise client_exc(message=error_message, [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Neutron server returns request_ids: ['req-dadc2546-5ca5-4cbb-ac1c-f53f30a52978'] [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] During handling of the above exception, another exception occurred: [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Traceback (most recent call last): [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._delete_instance(context, instance, bdms) [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._shutdown_instance(context, instance, bdms) [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._try_deallocate_network(context, instance, requested_networks) [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] with excutils.save_and_reraise_exception(): [ 1233.407554] env[61356]: ERROR 
nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self.force_reraise() [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise self.value [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] _deallocate_network_with_retries() [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return evt.wait() [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = hub.switch() [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.greenlet.switch() [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = func(*self.args, **self.kw) [ 1233.407554] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] result = f(*args, **kwargs) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._deallocate_network( [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self.network_api.deallocate_for_instance( [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 
1df51a6d-1303-4c89-b61e-95070acf0772] data = neutron.list_ports(**search_opts) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] ret = obj(*args, **kwargs) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.list('ports', self.ports_path, retrieve_all, [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] ret = obj(*args, **kwargs) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] for r in self._pagination(collection, path, **params): [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] res = self.get(path, params=params) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] ret = obj(*args, **kwargs) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.retry_request("GET", action, body=body, [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] ret = obj(*args, **kwargs) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] return self.do_request(method, action, body=body, [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] ret = obj(*args, **kwargs) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] self._handle_fault_response(status_code, replybody, resp) [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1233.409276] env[61356]: ERROR nova.compute.manager [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] [ 1233.437088] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Lock "1df51a6d-1303-4c89-b61e-95070acf0772" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.393s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.438253] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "1df51a6d-1303-4c89-b61e-95070acf0772" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 114.399s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.439169] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] During sync_power_state the instance has a pending task (deleting). Skip. [ 1233.439169] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "1df51a6d-1303-4c89-b61e-95070acf0772" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.488061] env[61356]: INFO nova.compute.manager [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] [instance: 1df51a6d-1303-4c89-b61e-95070acf0772] Successfully reverted task state from None on failure for instance. [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server [None req-1c1d69fb-7b34-44b6-bd80-4c775c4a23f1 tempest-ServersListShow296Test-120013467 tempest-ServersListShow296Test-120013467-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-dadc2546-5ca5-4cbb-ac1c-f53f30a52978'] [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1233.492408] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.494137] env[61356]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1233.494137] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1233.495850] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1233.495850] env[61356]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1233.495850] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1233.495850] env[61356]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1233.495850] env[61356]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1233.495850] env[61356]: ERROR oslo_messaging.rpc.server [ 1233.515854] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c762ee0-9c98-481d-a291-eb35b0394e9f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.522718] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354ce639-f14b-43a0-bab1-5952044c38f7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.553496] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f44dd5-33bf-45ff-8af6-3f53593d278a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.560623] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75c92b2-aa86-440a-a69f-296645819b6b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.573319] env[61356]: DEBUG nova.compute.provider_tree [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.583502] env[61356]: DEBUG nova.scheduler.client.report [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1233.597219] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.597674] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1233.600535] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.340s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.601555] env[61356]: INFO nova.compute.claims [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1233.629227] env[61356]: DEBUG nova.compute.utils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1233.630416] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1233.630586] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1233.639389] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1233.692193] env[61356]: DEBUG nova.policy [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9152ef2384c24cb491cbded347806a38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f84662a3f81451aabb0682ee9f610c2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1233.715037] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1233.743127] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1233.743409] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1233.743603] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1233.743828] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1233.744029] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1233.744212] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1233.744446] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1233.744645] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1233.745014] env[61356]: DEBUG 
nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1233.745192] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1233.745322] env[61356]: DEBUG nova.virt.hardware [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1233.746286] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1446b93b-9edd-420b-bb57-379c4d493359 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.758393] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace35106-cba0-4a2c-a299-cdc44991df5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.936615] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675748af-6e55-486b-96a5-f662fc98fe2a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.944261] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fd96b8-86a1-445e-9359-0b57657c2317 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.977198] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3781f3-5741-4a25-a00a-c46710913431 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.984489] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77250426-5ae6-40f2-99cd-80d40c7c451f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.998053] env[61356]: DEBUG nova.compute.provider_tree [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.008581] env[61356]: DEBUG nova.scheduler.client.report [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1234.024745] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.424s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.025051] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1234.074233] env[61356]: DEBUG nova.compute.utils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1234.075454] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1234.075624] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1234.082648] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Successfully created port: 96fac4bb-6a91-42c2-88e7-8f229a174227 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.087085] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1234.173024] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1234.185508] env[61356]: DEBUG nova.policy [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a0ce4083a1f4437acbc46e18e756d20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '193c1db45c694637b363d580f34dfef8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1234.202457] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1234.202967] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1234.204016] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1234.204016] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1234.204016] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1234.204016] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1234.204016] env[61356]: DEBUG nova.virt.hardware [None 
req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1234.204386] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1234.204669] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1234.204953] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1234.205299] env[61356]: DEBUG nova.virt.hardware [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1234.206305] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e0b6a9-7a25-4ab2-b63f-b3924770cbb4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.216483] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bf7167-562a-4edf-8be6-2c8ad10c25cf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.567657] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "3bc9faac-35bb-4303-937f-3682771e9258" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.703348] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Successfully updated port: 96fac4bb-6a91-42c2-88e7-8f229a174227 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1234.722595] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.722595] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquired lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.722595] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1234.741580] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Successfully created port: 0f16f908-3b5e-4b2c-878e-345e74ab32d9 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.773162] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1234.857451] env[61356]: DEBUG nova.compute.manager [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Received event network-vif-plugged-96fac4bb-6a91-42c2-88e7-8f229a174227 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1234.857665] env[61356]: DEBUG oslo_concurrency.lockutils [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] Acquiring lock "3bc9faac-35bb-4303-937f-3682771e9258-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.857867] env[61356]: DEBUG oslo_concurrency.lockutils [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] Lock "3bc9faac-35bb-4303-937f-3682771e9258-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.858152] env[61356]: DEBUG oslo_concurrency.lockutils [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] Lock "3bc9faac-35bb-4303-937f-3682771e9258-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.858392] env[61356]: DEBUG nova.compute.manager [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] No waiting events found dispatching network-vif-plugged-96fac4bb-6a91-42c2-88e7-8f229a174227 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1234.859031] env[61356]: WARNING nova.compute.manager [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service 
nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Received unexpected event network-vif-plugged-96fac4bb-6a91-42c2-88e7-8f229a174227 for instance with vm_state building and task_state deleting. [ 1234.859031] env[61356]: DEBUG nova.compute.manager [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Received event network-changed-96fac4bb-6a91-42c2-88e7-8f229a174227 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1234.859031] env[61356]: DEBUG nova.compute.manager [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Refreshing instance network info cache due to event network-changed-96fac4bb-6a91-42c2-88e7-8f229a174227. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1234.859320] env[61356]: DEBUG oslo_concurrency.lockutils [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] Acquiring lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.956565] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Updating instance_info_cache with network_info: [{"id": "96fac4bb-6a91-42c2-88e7-8f229a174227", "address": "fa:16:3e:8b:5e:d1", "network": {"id": "0fa2e1e6-06c4-4c14-8c03-5ae3661f0661", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1311968491-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f84662a3f81451aabb0682ee9f610c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fac4bb-6a", "ovs_interfaceid": "96fac4bb-6a91-42c2-88e7-8f229a174227", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.972643] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Releasing lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.972989] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance network_info: |[{"id": 
"96fac4bb-6a91-42c2-88e7-8f229a174227", "address": "fa:16:3e:8b:5e:d1", "network": {"id": "0fa2e1e6-06c4-4c14-8c03-5ae3661f0661", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1311968491-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f84662a3f81451aabb0682ee9f610c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fac4bb-6a", "ovs_interfaceid": "96fac4bb-6a91-42c2-88e7-8f229a174227", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1234.973325] env[61356]: DEBUG oslo_concurrency.lockutils [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] Acquired lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.973764] env[61356]: DEBUG nova.network.neutron [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Refreshing network info cache for port 96fac4bb-6a91-42c2-88e7-8f229a174227 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1234.975270] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:5e:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96fac4bb-6a91-42c2-88e7-8f229a174227', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1234.984471] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Creating folder: Project (0f84662a3f81451aabb0682ee9f610c2). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1234.987439] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a2f06a5-a31d-4f42-95e4-a100681b0bbd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.998845] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Created folder: Project (0f84662a3f81451aabb0682ee9f610c2) in parent group-v846926. 
[ 1234.999045] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Creating folder: Instances. Parent ref: group-v847000. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1234.999283] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c6962c7-8f4a-4e79-87a9-ca28ca2d6647 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.010658] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Created folder: Instances in parent group-v847000. [ 1235.010940] env[61356]: DEBUG oslo.service.loopingcall [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1235.011397] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1235.011484] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4197559b-7dd6-4ec6-9283-168a67ab0158 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.033724] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1235.033724] env[61356]: value = "task-4302587" [ 1235.033724] env[61356]: _type = "Task" [ 1235.033724] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.043787] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302587, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.274716] env[61356]: DEBUG nova.network.neutron [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Updated VIF entry in instance network info cache for port 96fac4bb-6a91-42c2-88e7-8f229a174227. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1235.275084] env[61356]: DEBUG nova.network.neutron [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Updating instance_info_cache with network_info: [{"id": "96fac4bb-6a91-42c2-88e7-8f229a174227", "address": "fa:16:3e:8b:5e:d1", "network": {"id": "0fa2e1e6-06c4-4c14-8c03-5ae3661f0661", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1311968491-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f84662a3f81451aabb0682ee9f610c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0b29c52-62b0-4a9e-8e1c-41cf6ac8b916", "external-id": "nsx-vlan-transportzone-143", "segmentation_id": 143, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96fac4bb-6a", "ovs_interfaceid": "96fac4bb-6a91-42c2-88e7-8f229a174227", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.286255] env[61356]: DEBUG oslo_concurrency.lockutils [req-b28d1989-f54a-4437-845e-1441bcccd8bc req-502ebf7a-9590-40e7-a433-2ed0251b41cd service nova] Releasing lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.544422] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302587, 'name': CreateVM_Task, 'duration_secs': 0.328583} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.544589] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1235.545293] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.545447] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.545788] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1235.546067] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75d587bb-1931-4b43-a8db-560f492b56aa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.550824] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Waiting for the task: (returnval){ [ 1235.550824] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c56a41-9c27-7a14-2058-3b993a2a6759" [ 1235.550824] env[61356]: _type = "Task" [ 1235.550824] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.558221] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c56a41-9c27-7a14-2058-3b993a2a6759, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.869141] env[61356]: DEBUG nova.compute.manager [req-4adfa3d2-b5b6-46e1-a089-be4678e39f6a req-156e592d-61a3-41fd-b9cb-7544da6d70ae service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Received event network-vif-plugged-0f16f908-3b5e-4b2c-878e-345e74ab32d9 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1235.869361] env[61356]: DEBUG oslo_concurrency.lockutils [req-4adfa3d2-b5b6-46e1-a089-be4678e39f6a req-156e592d-61a3-41fd-b9cb-7544da6d70ae service nova] Acquiring lock "7d397e6b-a49f-410a-afcc-23c4c462024b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.869621] env[61356]: DEBUG oslo_concurrency.lockutils [req-4adfa3d2-b5b6-46e1-a089-be4678e39f6a req-156e592d-61a3-41fd-b9cb-7544da6d70ae service nova] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.869906] env[61356]: DEBUG oslo_concurrency.lockutils [req-4adfa3d2-b5b6-46e1-a089-be4678e39f6a req-156e592d-61a3-41fd-b9cb-7544da6d70ae service nova] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.872882] env[61356]: DEBUG nova.compute.manager [req-4adfa3d2-b5b6-46e1-a089-be4678e39f6a req-156e592d-61a3-41fd-b9cb-7544da6d70ae service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] No waiting events found dispatching network-vif-plugged-0f16f908-3b5e-4b2c-878e-345e74ab32d9 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1235.873131] env[61356]: WARNING nova.compute.manager [req-4adfa3d2-b5b6-46e1-a089-be4678e39f6a req-156e592d-61a3-41fd-b9cb-7544da6d70ae service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Received unexpected event network-vif-plugged-0f16f908-3b5e-4b2c-878e-345e74ab32d9 for instance with vm_state building and task_state spawning. 
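
The task-4302587 and task-4302590 records above follow the oslo.vmware pattern of submitting a vCenter task (CreateVM_Task, SearchDatastore_Task) and polling it until it reaches a terminal state ("progress is 0%" ... "completed successfully" with a duration). Below is a generic sketch of such a polling loop, not the real oslo_vmware.api interface: get_task_info and TaskFailed are stand-ins assumed for illustration, and the actual library drives the poll through a looping call and translates vSphere faults itself.

    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vSphere-style task until it reaches a terminal state.

        get_task_info is any callable returning a dict such as
        {'state': 'running', 'progress': 0} -- an assumption for this
        sketch; the real code reads a TaskInfo managed object instead.
        """
        while True:
            info = get_task_info()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown fault"))
            # 'queued' or 'running': keep waiting, which is what the repeated
            # "CreateVM_Task progress is 0%" records above correspond to.
            time.sleep(interval)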
[ 1235.961507] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Successfully updated port: 0f16f908-3b5e-4b2c-878e-345e74ab32d9 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1235.973855] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "refresh_cache-7d397e6b-a49f-410a-afcc-23c4c462024b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1235.973996] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquired lock "refresh_cache-7d397e6b-a49f-410a-afcc-23c4c462024b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.974158] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1236.036747] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1236.062416] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.062671] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.062880] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.322874] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Updating instance_info_cache with network_info: [{"id": "0f16f908-3b5e-4b2c-878e-345e74ab32d9", "address": "fa:16:3e:e4:86:e5", "network": {"id": "064db6e0-a867-4157-91d7-4050b53080bf", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2084199213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "193c1db45c694637b363d580f34dfef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f16f908-3b", "ovs_interfaceid": "0f16f908-3b5e-4b2c-878e-345e74ab32d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.341250] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Releasing lock "refresh_cache-7d397e6b-a49f-410a-afcc-23c4c462024b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1236.341570] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance 
network_info: |[{"id": "0f16f908-3b5e-4b2c-878e-345e74ab32d9", "address": "fa:16:3e:e4:86:e5", "network": {"id": "064db6e0-a867-4157-91d7-4050b53080bf", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2084199213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "193c1db45c694637b363d580f34dfef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f16f908-3b", "ovs_interfaceid": "0f16f908-3b5e-4b2c-878e-345e74ab32d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1236.341996] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:86:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d39252e-42ef-4252-98d3-62af5a0d109d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f16f908-3b5e-4b2c-878e-345e74ab32d9', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1236.349377] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Creating folder: Project (193c1db45c694637b363d580f34dfef8). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1236.349969] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5560773e-cbb1-468c-8ac3-57b8dfdf785c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.359011] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Created folder: Project (193c1db45c694637b363d580f34dfef8) in parent group-v846926. [ 1236.359205] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Creating folder: Instances. Parent ref: group-v847003. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1236.359429] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-975dc61e-a709-42f9-b867-366daa792540 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.367437] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Created folder: Instances in parent group-v847003. [ 1236.367663] env[61356]: DEBUG oslo.service.loopingcall [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1236.367845] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1236.369054] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-841f77e1-c8e5-4a4c-b398-43f1a03b53fc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.386651] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1236.386651] env[61356]: value = "task-4302590" [ 1236.386651] env[61356]: _type = "Task" [ 1236.386651] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.394399] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302590, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.896562] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302590, 'name': CreateVM_Task, 'duration_secs': 0.308946} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.896741] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1236.897428] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1236.897651] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.897977] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1236.898239] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e012f87b-1365-4778-8394-b6d2aecc30b8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.903177] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Waiting for the task: (returnval){ [ 1236.903177] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52b3da17-a5b5-55c4-f306-27a3edf7c742" [ 1236.903177] env[61356]: _type = "Task" [ 1236.903177] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.911217] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52b3da17-a5b5-55c4-f306-27a3edf7c742, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.415022] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.415022] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.415381] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.896422] env[61356]: DEBUG nova.compute.manager [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Received event network-changed-0f16f908-3b5e-4b2c-878e-345e74ab32d9 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1237.896621] env[61356]: DEBUG nova.compute.manager [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Refreshing instance network info cache due to event network-changed-0f16f908-3b5e-4b2c-878e-345e74ab32d9. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1237.896848] env[61356]: DEBUG oslo_concurrency.lockutils [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] Acquiring lock "refresh_cache-7d397e6b-a49f-410a-afcc-23c4c462024b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.897016] env[61356]: DEBUG oslo_concurrency.lockutils [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] Acquired lock "refresh_cache-7d397e6b-a49f-410a-afcc-23c4c462024b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.897201] env[61356]: DEBUG nova.network.neutron [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Refreshing network info cache for port 0f16f908-3b5e-4b2c-878e-345e74ab32d9 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1238.246398] env[61356]: DEBUG nova.network.neutron [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Updated VIF entry in instance network info cache for port 0f16f908-3b5e-4b2c-878e-345e74ab32d9. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1238.246829] env[61356]: DEBUG nova.network.neutron [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Updating instance_info_cache with network_info: [{"id": "0f16f908-3b5e-4b2c-878e-345e74ab32d9", "address": "fa:16:3e:e4:86:e5", "network": {"id": "064db6e0-a867-4157-91d7-4050b53080bf", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2084199213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "193c1db45c694637b363d580f34dfef8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d39252e-42ef-4252-98d3-62af5a0d109d", "external-id": "nsx-vlan-transportzone-190", "segmentation_id": 190, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f16f908-3b", "ovs_interfaceid": "0f16f908-3b5e-4b2c-878e-345e74ab32d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.258208] env[61356]: DEBUG oslo_concurrency.lockutils [req-36e97f80-7823-4405-8a6a-94e715f88598 req-515820fc-94b9-4738-8b8b-2f5841103509 service nova] Releasing lock "refresh_cache-7d397e6b-a49f-410a-afcc-23c4c462024b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.443228] env[61356]: DEBUG oslo_concurrency.lockutils [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "7d397e6b-a49f-410a-afcc-23c4c462024b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.715443] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1251.715823] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.277226] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b45f01f9-4dbd-4825-92c9-ab48d2739f26 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock 
"e4d2004e-9c64-44e7-b510-895ae532e546" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.277553] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b45f01f9-4dbd-4825-92c9-ab48d2739f26 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "e4d2004e-9c64-44e7-b510-895ae532e546" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.614379] env[61356]: WARNING oslo_vmware.rw_handles [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1280.614379] env[61356]: ERROR oslo_vmware.rw_handles [ 1280.614993] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1280.616761] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1280.617124] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Copying Virtual Disk [datastore2] vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] 
vmware_temp/d0b249e2-72ac-480e-8f35-19d65715cf29/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1280.617490] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0afa4977-2dc7-48fe-ad36-a541dfcf90ca {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.625372] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Waiting for the task: (returnval){ [ 1280.625372] env[61356]: value = "task-4302591" [ 1280.625372] env[61356]: _type = "Task" [ 1280.625372] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.633163] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Task: {'id': task-4302591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.136089] env[61356]: DEBUG oslo_vmware.exceptions [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1281.136089] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.136522] env[61356]: ERROR nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1281.136522] env[61356]: Faults: ['InvalidArgument'] [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Traceback (most recent call last): [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] yield resources [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self.driver.spawn(context, instance, image_meta, [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1281.136522] env[61356]: ERROR 
nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self._fetch_image_if_missing(context, vi) [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] image_cache(vi, tmp_image_ds_loc) [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] vm_util.copy_virtual_disk( [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] session._wait_for_task(vmdk_copy_task) [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] return self.wait_for_task(task_ref) [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] return evt.wait() [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] result = hub.switch() [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] return self.greenlet.switch() [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self.f(*self.args, **self.kw) [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] raise exceptions.translate_fault(task_info.error) [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Faults: ['InvalidArgument'] [ 1281.136522] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] [ 1281.137299] env[61356]: INFO nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Terminating instance [ 1281.139643] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1281.139844] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1281.140156] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.140354] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.141101] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0229cbbe-d81a-410b-8601-e207c76b1604 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.143636] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6369ca25-7ce8-407a-88e4-4cf8f032c64a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.149241] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1281.149469] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2754d401-60ae-4035-9b19-a97724a31f02 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.151689] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.151888] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1281.152864] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc5c1711-d912-43f2-ac0d-4e12723170d9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.157487] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 1281.157487] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e00081-a425-203b-0eaf-853293e65d98" [ 1281.157487] env[61356]: _type = "Task" [ 1281.157487] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.164657] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e00081-a425-203b-0eaf-853293e65d98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.218668] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1281.218918] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1281.219112] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Deleting the datastore file [datastore2] 2d2dca63-8ab0-4420-90b0-38a330df8271 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.219439] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db2b62e7-0c12-42f8-9c76-a4552b363417 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.225498] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Waiting for the task: (returnval){ [ 1281.225498] env[61356]: value = "task-4302593" [ 1281.225498] env[61356]: _type = "Task" [ 1281.225498] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.232827] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Task: {'id': task-4302593, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.667892] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1281.668285] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating directory with path [datastore2] vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.668535] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc9fb019-2892-48e8-bd69-805c10231a24 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.679921] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Created directory with path [datastore2] vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.680162] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Fetch image to [datastore2] vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1281.680308] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1281.681356] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2721f2c1-18c8-4551-be4f-09aef3d4669d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.687274] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e86aba-01a7-4b0e-8de1-0db326806658 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.697167] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8221be1-41e1-4f9e-a3ac-32718f9c7857 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.729734] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ebe5d8-60f7-422b-b4e2-56df9e02a8e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.736470] env[61356]: DEBUG oslo_vmware.api [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Task: {'id': task-4302593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072711} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.737887] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1281.738098] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1281.738274] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1281.738447] env[61356]: INFO nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1281.740631] env[61356]: DEBUG nova.compute.claims [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1281.740811] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.741041] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.744639] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-71f76938-2787-4582-a332-e67dcef9e428 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.758187] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.758445] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.764737] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1281.770085] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.824431] env[61356]: DEBUG oslo_vmware.rw_handles [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1281.884979] env[61356]: DEBUG oslo_vmware.rw_handles [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1281.884979] env[61356]: DEBUG oslo_vmware.rw_handles [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1282.092873] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12a6f09-6a28-480f-b141-d7ce4465192d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.100486] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6534e5-ec5e-4fba-abf4-b878fbefacef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.130669] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9a756a-8c10-41f0-9982-d307659389a7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.137131] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98e9f0e-2dc4-4c03-a27f-0952013b71fe {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.149919] env[61356]: DEBUG nova.compute.provider_tree [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.158745] env[61356]: DEBUG nova.scheduler.client.report [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1282.172140] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.431s 
{{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.172642] env[61356]: ERROR nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.172642] env[61356]: Faults: ['InvalidArgument'] [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Traceback (most recent call last): [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self.driver.spawn(context, instance, image_meta, [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self._fetch_image_if_missing(context, vi) [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] image_cache(vi, tmp_image_ds_loc) [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] vm_util.copy_virtual_disk( [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] session._wait_for_task(vmdk_copy_task) [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] return self.wait_for_task(task_ref) [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] return evt.wait() [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.172642] env[61356]: ERROR 
nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] result = hub.switch() [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] return self.greenlet.switch() [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] self.f(*self.args, **self.kw) [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] raise exceptions.translate_fault(task_info.error) [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Faults: ['InvalidArgument'] [ 1282.172642] env[61356]: ERROR nova.compute.manager [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] [ 1282.173449] env[61356]: DEBUG nova.compute.utils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1282.174415] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.404s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.174597] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.174747] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1282.175409] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Build of instance 2d2dca63-8ab0-4420-90b0-38a330df8271 was re-scheduled: A specified parameter was not correct: fileType [ 1282.175409] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1282.175813] env[61356]: DEBUG nova.compute.manager [None 
req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1282.175983] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1282.176168] env[61356]: DEBUG nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1282.176327] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1282.178555] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a0d4e4-147c-448c-bb6d-b8add04b6270 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.186198] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0187467a-273d-4838-be38-3e82e1d8db91 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.199396] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1684a729-2245-456a-86ad-71afed594414 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.205282] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ece219b-d348-4ecc-a479-9ba4cf596874 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.233160] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180691MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1282.233314] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.233494] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.324072] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.324248] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 22e72346-49c6-4044-ae29-96679d4f2253 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.324382] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.324507] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.324626] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.324742] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.324857] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.324973] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.325102] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.325219] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1282.336377] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.346713] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.356307] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bca58b74-2a67-4943-b6f3-52dec9227ceb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.365678] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bec41d8f-c7c9-4c23-af43-0ef4816f631e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.375075] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d512abed-9776-449c-9de0-00925d888964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.384350] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1c4d0b51-253c-4f0f-8c83-b341900afd28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.393156] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.402369] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b23c7c3d-86d9-4b3c-b121-099833985d7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.411940] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 15e801d1-823a-4a9c-876a-c60fa6c686f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.424909] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7170bb5-0399-44d5-adf0-6eca96e5f7b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.436694] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.447012] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance e4d2004e-9c64-44e7-b510-895ae532e546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1282.447535] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1282.447535] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=110GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1282.646046] env[61356]: DEBUG nova.network.neutron [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.658596] env[61356]: INFO nova.compute.manager [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Took 0.48 seconds to deallocate network for instance. [ 1282.763516] env[61356]: INFO nova.scheduler.client.report [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Deleted allocations for instance 2d2dca63-8ab0-4420-90b0-38a330df8271 [ 1282.771347] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12b7b87-546f-4653-99f3-cd967b57a57d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.780810] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dca5f01-8953-452d-85c8-9a7f68fb60b3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.811685] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6f4d4054-ff38-4138-8b74-995068dbc533 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.945s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.812469] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2931a38-e298-4e0a-9b93-c70ed16328f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.816035] env[61356]: DEBUG oslo_concurrency.lockutils [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 426.597s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.816304] 
env[61356]: DEBUG oslo_concurrency.lockutils [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Acquiring lock "2d2dca63-8ab0-4420-90b0-38a330df8271-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.816548] env[61356]: DEBUG oslo_concurrency.lockutils [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.816744] env[61356]: DEBUG oslo_concurrency.lockutils [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.819170] env[61356]: INFO nova.compute.manager [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Terminating instance [ 1282.823248] env[61356]: DEBUG nova.compute.manager [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1282.823486] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1282.823806] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4b53b0e-3f84-428a-99d6-0cb96dbb33ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.826692] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b2de32-6136-4b9f-a56f-e0b0b23bdae9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.832027] env[61356]: DEBUG nova.compute.manager [None req-6b649a2c-7c1f-4d12-b08b-1c48497e9a02 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 1fe3a0c0-6d61-450a-8754-ce82f6b24661] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.843600] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.847432] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a219c46-ff8d-4618-ae18-b7788f0163e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.859733] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1282.862803] env[61356]: DEBUG nova.compute.manager [None req-6b649a2c-7c1f-4d12-b08b-1c48497e9a02 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 1fe3a0c0-6d61-450a-8754-ce82f6b24661] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.875089] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1282.875290] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.642s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.884922] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2d2dca63-8ab0-4420-90b0-38a330df8271 could not be found. [ 1282.885141] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1282.885399] env[61356]: INFO nova.compute.manager [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 1282.885555] env[61356]: DEBUG oslo.service.loopingcall [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1282.886598] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6b649a2c-7c1f-4d12-b08b-1c48497e9a02 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "1fe3a0c0-6d61-450a-8754-ce82f6b24661" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.847s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.887854] env[61356]: DEBUG nova.compute.manager [-] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1282.887854] env[61356]: DEBUG nova.network.neutron [-] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1282.906086] env[61356]: DEBUG nova.compute.manager [None req-16e1b0f1-2f41-40cc-a2a2-853d11f91c6d tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] [instance: da968e6f-ef0a-41b5-b4a5-d11c096ad018] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.922361] env[61356]: DEBUG nova.network.neutron [-] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.930829] env[61356]: INFO nova.compute.manager [-] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] Took 0.04 seconds to deallocate network for instance. [ 1282.941954] env[61356]: DEBUG nova.compute.manager [None req-16e1b0f1-2f41-40cc-a2a2-853d11f91c6d tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] [instance: da968e6f-ef0a-41b5-b4a5-d11c096ad018] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.964196] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16e1b0f1-2f41-40cc-a2a2-853d11f91c6d tempest-AttachVolumeShelveTestJSON-195659224 tempest-AttachVolumeShelveTestJSON-195659224-project-member] Lock "da968e6f-ef0a-41b5-b4a5-d11c096ad018" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 219.638s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.990026] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1283.064454] env[61356]: DEBUG oslo_concurrency.lockutils [None req-443ba3b6-7505-4a9f-ae0d-f4032056153a tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.248s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.065310] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 164.025s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.065500] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2d2dca63-8ab0-4420-90b0-38a330df8271] During sync_power_state the instance has a pending task (deleting). Skip. [ 1283.065675] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "2d2dca63-8ab0-4420-90b0-38a330df8271" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.074250] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.074479] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.077652] env[61356]: INFO nova.compute.claims [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1283.356450] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d7bfc2-d580-4ae3-995a-a81993219715 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.363972] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b488f52a-9d40-40dc-8a30-e6d5b6e448eb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.393104] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1a4510-81f5-49db-a798-10ddae0887f4 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.399450] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b21af2b-1c13-4cf6-bb0b-b975b0264162 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.411767] env[61356]: DEBUG nova.compute.provider_tree [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.420351] env[61356]: DEBUG nova.scheduler.client.report [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.437222] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.363s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.437666] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1283.470539] env[61356]: DEBUG nova.compute.utils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1283.471900] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Allocating IP information in the background. 
{{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1283.472129] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1283.480938] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1283.542477] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1283.545976] env[61356]: DEBUG nova.policy [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1723df4943b94bf6b710e2bed2e5ceb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df25f0f186cf4f4e9c4bdcbfe237d296', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1283.567659] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1283.567898] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1283.568071] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b 
tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1283.568258] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1283.568406] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1283.568554] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1283.568755] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1283.568913] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1283.569089] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1283.569252] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1283.569494] env[61356]: DEBUG nova.virt.hardware [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1283.570364] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780dfb14-fc51-4130-9036-6925040ae230 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.578023] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0babfc08-ee91-4f5b-b22b-5b1abf1d70b2 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.121061] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Successfully created port: 39d11f80-f1c1-4c74-bc25-408e3929e071 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1284.880711] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.880711] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.067337] env[61356]: DEBUG nova.compute.manager [req-fa921b17-278d-4bb1-bd77-0fc2c28989a5 req-fb3c0cda-a4cb-4615-8890-9a19d374b537 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Received event network-vif-plugged-39d11f80-f1c1-4c74-bc25-408e3929e071 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1285.067337] env[61356]: DEBUG oslo_concurrency.lockutils [req-fa921b17-278d-4bb1-bd77-0fc2c28989a5 req-fb3c0cda-a4cb-4615-8890-9a19d374b537 service nova] Acquiring lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.067337] env[61356]: DEBUG oslo_concurrency.lockutils [req-fa921b17-278d-4bb1-bd77-0fc2c28989a5 req-fb3c0cda-a4cb-4615-8890-9a19d374b537 service nova] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.067611] env[61356]: DEBUG oslo_concurrency.lockutils [req-fa921b17-278d-4bb1-bd77-0fc2c28989a5 req-fb3c0cda-a4cb-4615-8890-9a19d374b537 service nova] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.068433] env[61356]: DEBUG nova.compute.manager [req-fa921b17-278d-4bb1-bd77-0fc2c28989a5 req-fb3c0cda-a4cb-4615-8890-9a19d374b537 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] No waiting events found dispatching network-vif-plugged-39d11f80-f1c1-4c74-bc25-408e3929e071 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1285.068433] env[61356]: WARNING nova.compute.manager [req-fa921b17-278d-4bb1-bd77-0fc2c28989a5 req-fb3c0cda-a4cb-4615-8890-9a19d374b537 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Received unexpected event network-vif-plugged-39d11f80-f1c1-4c74-bc25-408e3929e071 for instance with vm_state building and task_state spawning. 
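The hardware.py sequence above narrows flavor/image limits of 65536 per dimension down to the single (1, 1, 1) topology for a one-vCPU flavor. An illustrative re-creation of that search (not Nova's actual implementation): enumerate the sockets * cores * threads factorizations of the vCPU count that fit the limits:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Yield every (sockets, cores, threads) triple whose product is vcpus
    # and whose dimensions respect the per-dimension maxima.
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log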
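The "unexpected event" warning above is the race the external-events API tolerates: Neutron reported network-vif-plugged-<port-id> before the driver registered a waiter for it, so pop_instance_event found nothing to dispatch. A toy model of that bookkeeping (names and structures are illustrative only, not Nova's code):

waiters = {}   # (instance_uuid, event_key) -> callback

def receive_event(instance_uuid, name, tag):
    # Events are keyed "<name>-<tag>"; a registered waiter consumes the
    # event, otherwise it is reported as unexpected, as in the log above.
    key = (instance_uuid, '%s-%s' % (name, tag))
    callback = waiters.pop(key, None)
    if callback is None:
        print('Received unexpected event %s-%s' % (name, tag))
    else:
        callback()

receive_event('ec2e64e5-9c48-46e1-9010-78eb2c52ede6',
              'network-vif-plugged',
              '39d11f80-f1c1-4c74-bc25-408e3929e071')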
[ 1285.123969] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Successfully updated port: 39d11f80-f1c1-4c74-bc25-408e3929e071 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1285.141423] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "refresh_cache-ec2e64e5-9c48-46e1-9010-78eb2c52ede6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.141423] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquired lock "refresh_cache-ec2e64e5-9c48-46e1-9010-78eb2c52ede6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.141423] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1285.404129] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1285.716224] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Updating instance_info_cache with network_info: [{"id": "39d11f80-f1c1-4c74-bc25-408e3929e071", "address": "fa:16:3e:29:a9:86", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d11f80-f1", "ovs_interfaceid": "39d11f80-f1c1-4c74-bc25-408e3929e071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.728013] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Releasing lock "refresh_cache-ec2e64e5-9c48-46e1-9010-78eb2c52ede6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.728325] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance network_info: |[{"id": "39d11f80-f1c1-4c74-bc25-408e3929e071", "address": "fa:16:3e:29:a9:86", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d11f80-f1", "ovs_interfaceid": "39d11f80-f1c1-4c74-bc25-408e3929e071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
1285.728721] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:a9:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39d11f80-f1c1-4c74-bc25-408e3929e071', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1285.736192] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Creating folder: Project (df25f0f186cf4f4e9c4bdcbfe237d296). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1285.737017] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a66ed814-169b-4ff2-be8a-975c1c3edc65 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.747650] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Created folder: Project (df25f0f186cf4f4e9c4bdcbfe237d296) in parent group-v846926. [ 1285.747837] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Creating folder: Instances. Parent ref: group-v847006. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1285.748075] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f88ebec9-9c7f-4a34-bc74-9076afa0265d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.757232] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.757501] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Created folder: Instances in parent group-v847006. [ 1285.757717] env[61356]: DEBUG oslo.service.loopingcall [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1285.758051] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1285.758190] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2538e31e-8275-4f74-b895-32a5ca35cf09 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.776866] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1285.776866] env[61356]: value = "task-4302596" [ 1285.776866] env[61356]: _type = "Task" [ 1285.776866] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.786059] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302596, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.288057] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302596, 'name': CreateVM_Task, 'duration_secs': 0.308141} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.288347] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1286.289013] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.289202] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.289647] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1286.289922] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be6d44b7-5fb1-48e2-af98-37b4303acc0d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.294551] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Waiting for the task: (returnval){ [ 1286.294551] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a356b6-c949-fa85-c802-a39a2c85a121" [ 1286.294551] env[61356]: _type = "Task" [ 
1286.294551] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.301824] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a356b6-c949-fa85-c802-a39a2c85a121, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.757940] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.758193] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.804488] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.804804] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1286.805074] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.092558] env[61356]: DEBUG nova.compute.manager [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Received event network-changed-39d11f80-f1c1-4c74-bc25-408e3929e071 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1287.092786] env[61356]: DEBUG nova.compute.manager [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Refreshing instance network info cache due to event network-changed-39d11f80-f1c1-4c74-bc25-408e3929e071. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1287.093046] env[61356]: DEBUG oslo_concurrency.lockutils [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] Acquiring lock "refresh_cache-ec2e64e5-9c48-46e1-9010-78eb2c52ede6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.093230] env[61356]: DEBUG oslo_concurrency.lockutils [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] Acquired lock "refresh_cache-ec2e64e5-9c48-46e1-9010-78eb2c52ede6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.093434] env[61356]: DEBUG nova.network.neutron [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Refreshing network info cache for port 39d11f80-f1c1-4c74-bc25-408e3929e071 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1287.500916] env[61356]: DEBUG nova.network.neutron [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Updated VIF entry in instance network info cache for port 39d11f80-f1c1-4c74-bc25-408e3929e071. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1287.501351] env[61356]: DEBUG nova.network.neutron [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Updating instance_info_cache with network_info: [{"id": "39d11f80-f1c1-4c74-bc25-408e3929e071", "address": "fa:16:3e:29:a9:86", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39d11f80-f1", "ovs_interfaceid": "39d11f80-f1c1-4c74-bc25-408e3929e071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.511851] env[61356]: DEBUG oslo_concurrency.lockutils [req-2186fdc9-6336-4e17-b912-0f9dea695d13 req-353dc30c-087c-4fc0-9af8-5e4160c58902 service nova] Releasing lock "refresh_cache-ec2e64e5-9c48-46e1-9010-78eb2c52ede6" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.757622] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.757824] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1289.757881] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1289.782217] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.782487] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.782695] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.782858] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783024] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783189] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783301] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783446] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783607] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783758] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1289.783905] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1289.784477] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1289.784658] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1300.537653] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.537957] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.907575] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1300.907807] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.067422] env[61356]: WARNING oslo_vmware.rw_handles [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1330.067422] env[61356]: ERROR oslo_vmware.rw_handles [ 1330.068062] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1330.069766] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1330.070060] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Copying Virtual Disk [datastore2] vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/3fa4516a-bf33-477a-b19d-6b5fbc8596e6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1330.070364] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a6d099c-0613-4f84-98aa-e04ad23a4236 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.079128] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 1330.079128] env[61356]: value = "task-4302597" [ 1330.079128] env[61356]: _type = "Task" [ 1330.079128] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.087080] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': task-4302597, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.590028] env[61356]: DEBUG oslo_vmware.exceptions [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1330.590295] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.590859] env[61356]: ERROR nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1330.590859] env[61356]: Faults: ['InvalidArgument'] [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Traceback (most recent call last): [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] yield resources [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self.driver.spawn(context, instance, image_meta, [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self._fetch_image_if_missing(context, vi) [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] image_cache(vi, tmp_image_ds_loc) [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] vm_util.copy_virtual_disk( [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", 
line 1423, in copy_virtual_disk [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] session._wait_for_task(vmdk_copy_task) [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] return self.wait_for_task(task_ref) [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] return evt.wait() [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] result = hub.switch() [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] return self.greenlet.switch() [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self.f(*self.args, **self.kw) [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] raise exceptions.translate_fault(task_info.error) [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Faults: ['InvalidArgument'] [ 1330.590859] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] [ 1330.591810] env[61356]: INFO nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Terminating instance [ 1330.592775] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.592987] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.594040] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1330.594040] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1330.594040] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36351e1d-22e8-4480-8186-be4b7ee965ed {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.596727] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2bc47e-8748-494a-a6e1-8265fe1aa934 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.603501] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1330.603722] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6056c7c2-8495-49c4-9c79-771930caaf06 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.605883] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.606074] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1330.606984] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2159b7d-faf9-47cb-89a9-ae642373d9bf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.611604] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Waiting for the task: (returnval){ [ 1330.611604] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]529cb21b-936d-8f2e-96c6-b0b5a10beea3" [ 1330.611604] env[61356]: _type = "Task" [ 1330.611604] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.618641] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]529cb21b-936d-8f2e-96c6-b0b5a10beea3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.679180] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1330.679180] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1330.679180] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Deleting the datastore file [datastore2] 22e72346-49c6-4044-ae29-96679d4f2253 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.679180] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f68ade03-344c-4e8d-b5e5-2bc052dad52c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.688134] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for the task: (returnval){ [ 1330.688134] env[61356]: value = "task-4302599" [ 1330.688134] env[61356]: _type = "Task" [ 1330.688134] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.695782] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': task-4302599, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.123375] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1331.123740] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Creating directory with path [datastore2] vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.123848] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b31996bd-df34-4b32-8698-a20fb7267a23 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.136473] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Created directory with path [datastore2] vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.136656] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Fetch image to [datastore2] vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1331.136839] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1331.137559] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faafbb0a-2d81-461d-ac02-aa7d8a7ac263 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.144033] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4630cf6-52ee-4157-9e0f-d54c967325fa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.152633] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bd9426-f00c-4333-8a7a-19847dc94fa7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.181848] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4baa33c3-749f-43ac-ab7a-aae45536f508 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.187572] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-08e82a89-5f87-4a7a-b6b6-5be1f3b1fd07 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.196466] env[61356]: DEBUG oslo_vmware.api [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Task: {'id': task-4302599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078177} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.196652] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1331.196838] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1331.197029] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1331.197220] env[61356]: INFO nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1331.199546] env[61356]: DEBUG nova.compute.claims [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1331.199717] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.199970] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.210248] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1331.263856] env[61356]: DEBUG oslo_vmware.rw_handles [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1331.325645] env[61356]: DEBUG oslo_vmware.rw_handles [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1331.325916] env[61356]: DEBUG oslo_vmware.rw_handles [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1331.556536] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bb03c0-4667-49d8-bd62-b95bee29cdad {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.564261] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa38cb27-1123-4a4a-b1f7-343ecf31a924 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.595853] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e576ff24-f7b6-47bc-aa32-a03f1eb62c33 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.603081] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dd8456-209a-4e67-8fd1-bd80506ccb94 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.615635] env[61356]: DEBUG nova.compute.provider_tree [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.624437] env[61356]: DEBUG nova.scheduler.client.report [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1331.638272] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.438s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.638764] env[61356]: ERROR nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1331.638764] env[61356]: Faults: ['InvalidArgument'] [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Traceback (most recent call last): [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1331.638764] env[61356]: ERROR nova.compute.manager 
[instance: 22e72346-49c6-4044-ae29-96679d4f2253] self.driver.spawn(context, instance, image_meta, [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self._fetch_image_if_missing(context, vi) [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] image_cache(vi, tmp_image_ds_loc) [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] vm_util.copy_virtual_disk( [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] session._wait_for_task(vmdk_copy_task) [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] return self.wait_for_task(task_ref) [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] return evt.wait() [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] result = hub.switch() [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] return self.greenlet.switch() [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] self.f(*self.args, **self.kw) [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] raise exceptions.translate_fault(task_info.error) [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Faults: ['InvalidArgument'] [ 1331.638764] env[61356]: ERROR nova.compute.manager [instance: 22e72346-49c6-4044-ae29-96679d4f2253] [ 1331.639742] env[61356]: DEBUG nova.compute.utils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1331.640993] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Build of instance 22e72346-49c6-4044-ae29-96679d4f2253 was re-scheduled: A specified parameter was not correct: fileType [ 1331.640993] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1331.641397] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1331.641572] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1331.641722] env[61356]: DEBUG nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1331.641880] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1332.100684] env[61356]: DEBUG nova.network.neutron [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.121018] env[61356]: INFO nova.compute.manager [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Took 0.48 seconds to deallocate network for instance. [ 1332.224023] env[61356]: INFO nova.scheduler.client.report [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Deleted allocations for instance 22e72346-49c6-4044-ae29-96679d4f2253 [ 1332.244024] env[61356]: DEBUG oslo_concurrency.lockutils [None req-58a91c5a-7c5c-4049-89cb-9dc23d79ba4d tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "22e72346-49c6-4044-ae29-96679d4f2253" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.223s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.244924] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "22e72346-49c6-4044-ae29-96679d4f2253" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 386.553s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.245180] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Acquiring lock "22e72346-49c6-4044-ae29-96679d4f2253-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.245425] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "22e72346-49c6-4044-ae29-96679d4f2253-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.245605] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "22e72346-49c6-4044-ae29-96679d4f2253-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.247604] env[61356]: INFO nova.compute.manager [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Terminating instance [ 1332.249643] env[61356]: DEBUG nova.compute.manager [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1332.249837] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1332.251460] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11c944d2-7cf4-4b79-a442-62f77cb15b10 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.259032] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527ab238-6ee3-4a31-9174-368687fe7b63 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.269497] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1332.290025] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 22e72346-49c6-4044-ae29-96679d4f2253 could not be found. [ 1332.290244] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1332.290425] env[61356]: INFO nova.compute.manager [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1332.290671] env[61356]: DEBUG oslo.service.loopingcall [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.291120] env[61356]: DEBUG nova.compute.manager [-] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1332.291120] env[61356]: DEBUG nova.network.neutron [-] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1332.316794] env[61356]: DEBUG nova.network.neutron [-] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.324376] env[61356]: INFO nova.compute.manager [-] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] Took 0.03 seconds to deallocate network for instance. [ 1332.330133] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.330133] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.331629] env[61356]: INFO nova.compute.claims [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1332.410497] env[61356]: DEBUG oslo_concurrency.lockutils [None req-16140ba6-e202-4dba-adbf-f98afb61e3bf tempest-MigrationsAdminTest-1581639656 tempest-MigrationsAdminTest-1581639656-project-member] Lock "22e72346-49c6-4044-ae29-96679d4f2253" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.411440] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "22e72346-49c6-4044-ae29-96679d4f2253" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 213.371s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.411652] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 22e72346-49c6-4044-ae29-96679d4f2253] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1332.411830] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "22e72346-49c6-4044-ae29-96679d4f2253" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.644676] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e050da-65af-4390-a0a1-be7f98302aac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.652362] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc9e442-ea43-4ca4-8f24-fe3d62718913 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.680593] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55a890d-4d62-423d-a441-37f582397c46 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.687078] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0e7252-611c-4a7e-94df-98c63782e1a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.700895] env[61356]: DEBUG nova.compute.provider_tree [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1332.710432] env[61356]: DEBUG nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.727108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.397s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.727599] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1332.760292] env[61356]: DEBUG nova.compute.utils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1332.762273] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1332.762507] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1332.771046] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1332.830797] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1332.856106] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1332.856351] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1332.856510] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1332.856692] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1332.856842] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1332.856989] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1332.857306] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1332.857508] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1332.857710] env[61356]: DEBUG 
nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1332.857920] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1332.858165] env[61356]: DEBUG nova.virt.hardware [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1332.859033] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-face5090-70a1-45c3-ad16-896d8dfe647e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.866513] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57897d5c-324e-4b0b-b4ec-2a1e19a0f08d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.881318] env[61356]: DEBUG nova.policy [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89c2ea569ffb416eb170549aaebb704a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fe9dfd77dff47cab342d5ebb764e770', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1333.392295] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Successfully created port: 4f110d82-c2fb-4503-b374-15f332510a7d {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1334.262858] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Successfully updated port: 4f110d82-c2fb-4503-b374-15f332510a7d {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1334.276349] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "refresh_cache-f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.276501] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "refresh_cache-f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.276652] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1334.358619] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1334.656823] env[61356]: DEBUG nova.compute.manager [req-5e090e60-5ce2-4d72-b73b-ba56ab175a59 req-7b42ae6f-540c-42d1-b51e-57876782cefb service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Received event network-vif-plugged-4f110d82-c2fb-4503-b374-15f332510a7d {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1334.657098] env[61356]: DEBUG oslo_concurrency.lockutils [req-5e090e60-5ce2-4d72-b73b-ba56ab175a59 req-7b42ae6f-540c-42d1-b51e-57876782cefb service nova] Acquiring lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.657247] env[61356]: DEBUG oslo_concurrency.lockutils [req-5e090e60-5ce2-4d72-b73b-ba56ab175a59 req-7b42ae6f-540c-42d1-b51e-57876782cefb service nova] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.657415] env[61356]: DEBUG oslo_concurrency.lockutils [req-5e090e60-5ce2-4d72-b73b-ba56ab175a59 req-7b42ae6f-540c-42d1-b51e-57876782cefb service nova] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.657580] env[61356]: DEBUG nova.compute.manager [req-5e090e60-5ce2-4d72-b73b-ba56ab175a59 req-7b42ae6f-540c-42d1-b51e-57876782cefb service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] No waiting events found dispatching network-vif-plugged-4f110d82-c2fb-4503-b374-15f332510a7d {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1334.657743] env[61356]: WARNING nova.compute.manager [req-5e090e60-5ce2-4d72-b73b-ba56ab175a59 req-7b42ae6f-540c-42d1-b51e-57876782cefb service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Received unexpected event network-vif-plugged-4f110d82-c2fb-4503-b374-15f332510a7d for instance with vm_state building and task_state spawning. 
[ 1334.859379] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Updating instance_info_cache with network_info: [{"id": "4f110d82-c2fb-4503-b374-15f332510a7d", "address": "fa:16:3e:f5:93:cf", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f110d82-c2", "ovs_interfaceid": "4f110d82-c2fb-4503-b374-15f332510a7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.871609] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "refresh_cache-f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.871609] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance network_info: |[{"id": "4f110d82-c2fb-4503-b374-15f332510a7d", "address": "fa:16:3e:f5:93:cf", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f110d82-c2", "ovs_interfaceid": "4f110d82-c2fb-4503-b374-15f332510a7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1334.872153] env[61356]: DEBUG 
nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:93:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae4e3171-21cd-4094-b6cf-81bf366c75bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f110d82-c2fb-4503-b374-15f332510a7d', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1334.879480] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating folder: Project (3fe9dfd77dff47cab342d5ebb764e770). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1334.880466] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69ab9010-8599-4b25-b7c6-e9233f82c23a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.895094] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Created folder: Project (3fe9dfd77dff47cab342d5ebb764e770) in parent group-v846926. [ 1334.895310] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating folder: Instances. Parent ref: group-v847009. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1334.895891] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d9879e2-f3d7-4b85-9e5b-02addd311def {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.904471] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Created folder: Instances in parent group-v847009. [ 1334.904800] env[61356]: DEBUG oslo.service.loopingcall [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1334.904898] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1334.905106] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35896113-5570-4c25-98a4-b1b3b0065319 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.926481] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1334.926481] env[61356]: value = "task-4302602" [ 1334.926481] env[61356]: _type = "Task" [ 1334.926481] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.934752] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302602, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.436353] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302602, 'name': CreateVM_Task, 'duration_secs': 0.276986} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.436477] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1335.437173] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.437342] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.437671] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1335.437944] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-113a021b-b528-45fe-925d-658dd78d3388 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.442016] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 1335.442016] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52afec99-01ad-f582-31ff-7be3a85abe22" [ 1335.442016] env[61356]: _type = "Task" [ 1335.442016] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.450238] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52afec99-01ad-f582-31ff-7be3a85abe22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.952522] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.952849] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.952926] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.684138] env[61356]: DEBUG nova.compute.manager [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Received event network-changed-4f110d82-c2fb-4503-b374-15f332510a7d {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1336.684138] env[61356]: DEBUG nova.compute.manager [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Refreshing instance network info cache due to event network-changed-4f110d82-c2fb-4503-b374-15f332510a7d. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1336.684317] env[61356]: DEBUG oslo_concurrency.lockutils [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] Acquiring lock "refresh_cache-f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.684554] env[61356]: DEBUG oslo_concurrency.lockutils [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] Acquired lock "refresh_cache-f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.684611] env[61356]: DEBUG nova.network.neutron [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Refreshing network info cache for port 4f110d82-c2fb-4503-b374-15f332510a7d {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1337.042213] env[61356]: DEBUG nova.network.neutron [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Updated VIF entry in instance network info cache for port 4f110d82-c2fb-4503-b374-15f332510a7d. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1337.042575] env[61356]: DEBUG nova.network.neutron [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Updating instance_info_cache with network_info: [{"id": "4f110d82-c2fb-4503-b374-15f332510a7d", "address": "fa:16:3e:f5:93:cf", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f110d82-c2", "ovs_interfaceid": "4f110d82-c2fb-4503-b374-15f332510a7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.051717] env[61356]: DEBUG oslo_concurrency.lockutils [req-ddc5dc58-3996-4f97-85f9-780b7252e8cf req-ffda91e7-0aeb-4de7-96aa-b8869873db2b service nova] Releasing lock "refresh_cache-f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.758492] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.957893] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.758703] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1343.772634] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.772851] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.773201] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.773201] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1343.774373] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6451ea23-2e70-44e4-8f2f-7c030cc10ac2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.783450] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1dad939-423e-4f9c-9e6e-8dc4abdb1f40 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.796974] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59f7209-1fda-4525-b45f-79024c0f223d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.803122] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8294df21-3bcf-48d9-901b-429e782d245f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.833043] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180665MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1343.833226] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.833431] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.931246] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.931414] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.931546] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.931671] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.931793] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.931912] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.932053] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.932239] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.932372] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.932491] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.944059] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bca58b74-2a67-4943-b6f3-52dec9227ceb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.954071] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bec41d8f-c7c9-4c23-af43-0ef4816f631e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.963528] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d512abed-9776-449c-9de0-00925d888964 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.972515] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1c4d0b51-253c-4f0f-8c83-b341900afd28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.981388] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.990141] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance b23c7c3d-86d9-4b3c-b121-099833985d7e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.998710] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 15e801d1-823a-4a9c-876a-c60fa6c686f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.006903] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7170bb5-0399-44d5-adf0-6eca96e5f7b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.015250] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.024109] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance e4d2004e-9c64-44e7-b510-895ae532e546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.033489] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.042193] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.042421] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1344.042566] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1344.297867] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a9ab04-cad0-4707-a876-d5e42ae500d4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.305946] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acd1ab3-817d-44fc-a33d-eaa65fdb8567 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.335870] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95655434-2ea4-4069-97bb-7ff081590588 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.343073] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c872fbf-72d3-4126-b511-86ee11a0898f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.355561] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.363958] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1344.376790] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1344.376966] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.544s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.371630] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.371932] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.758538] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.758526] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1347.758828] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.758780] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1350.759111] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1351.759958] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.760271] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1351.760337] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1351.783522] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.783673] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.783801] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.783926] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784059] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784183] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784305] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784422] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784537] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784652] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1351.784767] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1353.779126] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1358.833490] env[61356]: DEBUG oslo_concurrency.lockutils [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.490549] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "9958f378-1cab-49bf-8375-5e95c16401e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.490885] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "9958f378-1cab-49bf-8375-5e95c16401e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.380416] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.380734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.870808] env[61356]: WARNING oslo_vmware.rw_handles [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1380.870808] env[61356]:
ERROR oslo_vmware.rw_handles response.begin() [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1380.870808] env[61356]: ERROR oslo_vmware.rw_handles [ 1380.871352] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1380.873127] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1380.873377] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Copying Virtual Disk [datastore2] vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/a69e013f-976b-4629-ab7c-0a9eed5dace6/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1380.873668] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8e4304b-40ae-4abe-bf04-f3c217b6a175 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.881684] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Waiting for the task: (returnval){ [ 1380.881684] env[61356]: value = "task-4302603" [ 1380.881684] env[61356]: _type = "Task" [ 1380.881684] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.888875] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Task: {'id': task-4302603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.392523] env[61356]: DEBUG oslo_vmware.exceptions [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1381.392827] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.393417] env[61356]: ERROR nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.393417] env[61356]: Faults: ['InvalidArgument'] [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Traceback (most recent call last): [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] yield resources [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self.driver.spawn(context, instance, image_meta, [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self._fetch_image_if_missing(context, vi) [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] image_cache(vi, tmp_image_ds_loc) [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] vm_util.copy_virtual_disk( [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] session._wait_for_task(vmdk_copy_task) [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] return self.wait_for_task(task_ref) [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] return evt.wait() [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] result = hub.switch() [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] return self.greenlet.switch() [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self.f(*self.args, **self.kw) [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] raise exceptions.translate_fault(task_info.error) [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Faults: ['InvalidArgument'] [ 1381.393417] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] [ 1381.394310] env[61356]: INFO nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Terminating instance [ 1381.395283] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.395500] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.396182] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 
tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1381.396373] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1381.396605] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d52e3b01-3c2a-4d2c-97ad-9dc0c097679c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.398865] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2842d2-87f1-4b6a-8116-365e4efd9258 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.405324] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1381.405540] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42775b2c-4f65-4628-b4d7-42192ddb278d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.407657] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.407828] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1381.408771] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19a9a9c4-cbbd-4e3b-83a5-114a7f2b0890 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.413849] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 1381.413849] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c46924-5052-d644-9546-8f852ebfc292" [ 1381.413849] env[61356]: _type = "Task" [ 1381.413849] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.422753] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c46924-5052-d644-9546-8f852ebfc292, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.476240] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1381.476532] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1381.476719] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Deleting the datastore file [datastore2] 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1381.476981] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfdb2fab-782b-46d0-91cf-86a7a1b8d42c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.483251] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Waiting for the task: (returnval){ [ 1381.483251] env[61356]: value = "task-4302605" [ 1381.483251] env[61356]: _type = "Task" [ 1381.483251] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.490534] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Task: {'id': task-4302605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.925334] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1381.925585] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating directory with path [datastore2] vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.925797] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e32f4274-f523-4356-925e-22a176df242f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.937388] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Created directory with path [datastore2] vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.937580] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Fetch image to [datastore2] vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1381.937739] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1381.938480] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438a407b-36d9-4d2c-b095-b51b953047ea {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.944838] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b383ef-2ea5-4144-a200-ef955fd47a73 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.953748] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f472fc-9716-40b7-9cf7-deea7f2432f9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.988553] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9eb935d-b5b6-44ba-b195-7a9f7c1e672f {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.996971] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-beb7d89b-0e04-4786-96f6-bcd3cf0ae1f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.998639] env[61356]: DEBUG oslo_vmware.api [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Task: {'id': task-4302605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070849} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.998885] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.999090] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1381.999246] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1381.999422] env[61356]: INFO nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1382.001549] env[61356]: DEBUG nova.compute.claims [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1382.001732] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.001943] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.019739] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1382.071743] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1382.133795] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1382.133795] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1382.352515] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc02f4ef-080a-413b-9fc6-fb53a28729aa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.361566] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f302ee82-529b-437e-9f59-bbbc3c660441 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.390494] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe68d8b-2cbe-4a5e-8871-27b0a6621ced {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.396829] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbb4f1a-c56f-4f16-a376-c635ef15c323 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.410097] env[61356]: DEBUG nova.compute.provider_tree [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.418807] env[61356]: DEBUG nova.scheduler.client.report [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.432719] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.431s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.433209] env[61356]: ERROR nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1382.433209] env[61356]: Faults: ['InvalidArgument'] [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Traceback (most recent call last): [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1382.433209] 
env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self.driver.spawn(context, instance, image_meta, [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self._fetch_image_if_missing(context, vi) [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] image_cache(vi, tmp_image_ds_loc) [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] vm_util.copy_virtual_disk( [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] session._wait_for_task(vmdk_copy_task) [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] return self.wait_for_task(task_ref) [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] return evt.wait() [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] result = hub.switch() [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] return self.greenlet.switch() [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] self.f(*self.args, **self.kw) [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] raise exceptions.translate_fault(task_info.error) [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Faults: ['InvalidArgument'] [ 1382.433209] env[61356]: ERROR nova.compute.manager [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] [ 1382.434530] env[61356]: DEBUG nova.compute.utils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1382.435402] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Build of instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 was re-scheduled: A specified parameter was not correct: fileType [ 1382.435402] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1382.436012] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1382.436202] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1382.436373] env[61356]: DEBUG nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1382.436535] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1382.987985] env[61356]: DEBUG nova.network.neutron [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.999369] env[61356]: INFO nova.compute.manager [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Took 0.56 seconds to deallocate network for instance. [ 1383.099971] env[61356]: INFO nova.scheduler.client.report [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Deleted allocations for instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 [ 1383.122685] env[61356]: DEBUG oslo_concurrency.lockutils [None req-489f8435-a92f-4756-a5d3-ac12bff9b887 tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 589.166s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.123797] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 391.200s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.124032] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Acquiring lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.124237] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.124403] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.126541] env[61356]: INFO nova.compute.manager [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Terminating instance [ 1383.128512] env[61356]: DEBUG nova.compute.manager [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1383.128676] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1383.129454] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3830e07a-a72f-440c-a737-2775d5f68039 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.138066] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae785ba-ffc4-4612-96c9-8797756ce7a2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.148909] env[61356]: DEBUG nova.compute.manager [None req-6a0fba7f-fb95-4f73-aedc-84e96903ee05 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] [instance: bca58b74-2a67-4943-b6f3-52dec9227ceb] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1383.168445] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ba1ce81-6bb5-457c-8a8b-f91da2b71684 could not be found. 
[ 1383.168647] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1383.168819] env[61356]: INFO nova.compute.manager [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1383.169061] env[61356]: DEBUG oslo.service.loopingcall [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.169275] env[61356]: DEBUG nova.compute.manager [-] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1383.169367] env[61356]: DEBUG nova.network.neutron [-] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1383.173489] env[61356]: DEBUG nova.compute.manager [None req-6a0fba7f-fb95-4f73-aedc-84e96903ee05 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] [instance: bca58b74-2a67-4943-b6f3-52dec9227ceb] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1383.196874] env[61356]: DEBUG nova.network.neutron [-] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.199104] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6a0fba7f-fb95-4f73-aedc-84e96903ee05 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Lock "bca58b74-2a67-4943-b6f3-52dec9227ceb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 210.028s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.207038] env[61356]: INFO nova.compute.manager [-] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] Took 0.04 seconds to deallocate network for instance. [ 1383.212072] env[61356]: DEBUG nova.compute.manager [None req-2863887e-2541-4286-9805-95cb0e90e05e tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] [instance: bec41d8f-c7c9-4c23-af43-0ef4816f631e] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1383.235031] env[61356]: DEBUG nova.compute.manager [None req-2863887e-2541-4286-9805-95cb0e90e05e tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] [instance: bec41d8f-c7c9-4c23-af43-0ef4816f631e] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1383.264256] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2863887e-2541-4286-9805-95cb0e90e05e tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Lock "bec41d8f-c7c9-4c23-af43-0ef4816f631e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 209.661s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.274298] env[61356]: DEBUG nova.compute.manager [None req-ce4fbea7-96ba-4519-a83c-b387e03efa28 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] [instance: d512abed-9776-449c-9de0-00925d888964] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1383.305624] env[61356]: DEBUG nova.compute.manager [None req-ce4fbea7-96ba-4519-a83c-b387e03efa28 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] [instance: d512abed-9776-449c-9de0-00925d888964] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1383.321492] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9aaf2c86-5f90-4e8d-8318-72afcdc9513b tempest-VolumesAdminNegativeTest-1683973759 tempest-VolumesAdminNegativeTest-1683973759-project-member] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.198s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.322984] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 264.283s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.323188] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4ba1ce81-6bb5-457c-8a8b-f91da2b71684] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1383.323353] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "4ba1ce81-6bb5-457c-8a8b-f91da2b71684" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.334388] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce4fbea7-96ba-4519-a83c-b387e03efa28 tempest-ListServerFiltersTestJSON-478204490 tempest-ListServerFiltersTestJSON-478204490-project-member] Lock "d512abed-9776-449c-9de0-00925d888964" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 209.111s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.347074] env[61356]: DEBUG nova.compute.manager [None req-2f54bebd-912e-4168-827a-f20cf4fd8a12 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 1c4d0b51-253c-4f0f-8c83-b341900afd28] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1383.372279] env[61356]: DEBUG nova.compute.manager [None req-2f54bebd-912e-4168-827a-f20cf4fd8a12 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 1c4d0b51-253c-4f0f-8c83-b341900afd28] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1383.394754] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2f54bebd-912e-4168-827a-f20cf4fd8a12 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "1c4d0b51-253c-4f0f-8c83-b341900afd28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 199.557s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.404288] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1383.455928] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.456196] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.457837] env[61356]: INFO nova.compute.claims [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1383.712665] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccda853a-5c81-4510-b2cd-c1d10efc9307 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.720644] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8129b6bd-ab28-4c34-bfcf-e3707a63b859 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.749789] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b43b97-c67f-4191-b390-6935d23150b9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.757158] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec30aa63-e087-40de-9748-c6fea00a2bbd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.772208] env[61356]: DEBUG nova.compute.provider_tree [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.781057] env[61356]: DEBUG nova.scheduler.client.report [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1383.798228] env[61356]: DEBUG oslo_concurrency.lockutils 
[None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.342s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.798771] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1383.835598] env[61356]: DEBUG nova.compute.utils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1383.836922] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1383.837116] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1383.847043] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1383.913209] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1383.946247] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1383.946364] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1383.946472] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.946653] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1383.946798] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.947035] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1383.947312] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1383.947533] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1383.947780] env[61356]: DEBUG 
nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1383.947989] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1383.948246] env[61356]: DEBUG nova.virt.hardware [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.949471] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda709fe-95ae-42e4-b530-30e7119febb7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.955549] env[61356]: DEBUG nova.policy [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8169720a3ed44a30a59e3fa92c7d09d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '012c8be88c84442583defbc9f87174fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1383.960227] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a01926-616e-4188-82d8-721ab715bc72 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.358140] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Successfully created port: 4ecd50b7-00e3-440a-be4d-449297ea59f4 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1385.450851] env[61356]: DEBUG nova.compute.manager [req-c2d80610-5a5e-4257-a9a6-43134de53230 req-0f7c94a8-fd0c-4998-aa58-c9148ac22a86 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Received event network-vif-plugged-4ecd50b7-00e3-440a-be4d-449297ea59f4 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1385.451128] env[61356]: DEBUG oslo_concurrency.lockutils [req-c2d80610-5a5e-4257-a9a6-43134de53230 req-0f7c94a8-fd0c-4998-aa58-c9148ac22a86 service nova] Acquiring lock "f8e4391b-3224-42a7-9962-eaaf2230d70d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.451542] env[61356]: DEBUG oslo_concurrency.lockutils [req-c2d80610-5a5e-4257-a9a6-43134de53230 
req-0f7c94a8-fd0c-4998-aa58-c9148ac22a86 service nova] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.451542] env[61356]: DEBUG oslo_concurrency.lockutils [req-c2d80610-5a5e-4257-a9a6-43134de53230 req-0f7c94a8-fd0c-4998-aa58-c9148ac22a86 service nova] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.451650] env[61356]: DEBUG nova.compute.manager [req-c2d80610-5a5e-4257-a9a6-43134de53230 req-0f7c94a8-fd0c-4998-aa58-c9148ac22a86 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] No waiting events found dispatching network-vif-plugged-4ecd50b7-00e3-440a-be4d-449297ea59f4 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1385.452369] env[61356]: WARNING nova.compute.manager [req-c2d80610-5a5e-4257-a9a6-43134de53230 req-0f7c94a8-fd0c-4998-aa58-c9148ac22a86 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Received unexpected event network-vif-plugged-4ecd50b7-00e3-440a-be4d-449297ea59f4 for instance with vm_state building and task_state spawning. [ 1385.523254] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Successfully updated port: 4ecd50b7-00e3-440a-be4d-449297ea59f4 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1385.532978] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "refresh_cache-f8e4391b-3224-42a7-9962-eaaf2230d70d" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.533155] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquired lock "refresh_cache-f8e4391b-3224-42a7-9962-eaaf2230d70d" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.533307] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1385.601051] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1385.847104] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Updating instance_info_cache with network_info: [{"id": "4ecd50b7-00e3-440a-be4d-449297ea59f4", "address": "fa:16:3e:3f:df:13", "network": {"id": "e75ff3bf-6807-49a7-ba25-5c6a18efcedf", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1386871659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "012c8be88c84442583defbc9f87174fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ecd50b7-00", "ovs_interfaceid": "4ecd50b7-00e3-440a-be4d-449297ea59f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.865680] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Releasing lock "refresh_cache-f8e4391b-3224-42a7-9962-eaaf2230d70d" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.865993] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance network_info: |[{"id": "4ecd50b7-00e3-440a-be4d-449297ea59f4", "address": "fa:16:3e:3f:df:13", "network": {"id": "e75ff3bf-6807-49a7-ba25-5c6a18efcedf", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1386871659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "012c8be88c84442583defbc9f87174fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ecd50b7-00", "ovs_interfaceid": "4ecd50b7-00e3-440a-be4d-449297ea59f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1385.866422] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:df:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '360308f4-9d0a-4ec2-8bcf-44891f452847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ecd50b7-00e3-440a-be4d-449297ea59f4', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1385.874364] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Creating folder: Project (012c8be88c84442583defbc9f87174fd). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1385.874913] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-781f4a09-69e2-4c83-9bdd-005b2fc41737 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.885636] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Created folder: Project (012c8be88c84442583defbc9f87174fd) in parent group-v846926. [ 1385.885819] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Creating folder: Instances. Parent ref: group-v847012. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1385.886048] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50ff2fc0-8ac1-49cd-9564-d4214d2d11b5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.894198] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Created folder: Instances in parent group-v847012. [ 1385.894457] env[61356]: DEBUG oslo.service.loopingcall [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.894660] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1385.894888] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-862494bb-a529-443c-89cf-75bf6a49dbbd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.913323] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1385.913323] env[61356]: value = "task-4302608" [ 1385.913323] env[61356]: _type = "Task" [ 1385.913323] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.921217] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302608, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.424326] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302608, 'name': CreateVM_Task, 'duration_secs': 0.269271} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.424469] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1386.425103] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.425280] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.425582] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1386.425825] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fc1a6fa-0012-43e7-acbd-3fa26733fc63 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.430041] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Waiting for the task: (returnval){ [ 1386.430041] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]528681ea-8a9a-c055-77d4-b127d7d8a4a9" [ 1386.430041] env[61356]: _type = "Task" [ 1386.430041] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.437102] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]528681ea-8a9a-c055-77d4-b127d7d8a4a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.940895] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.940895] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1386.940895] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.512504] env[61356]: DEBUG nova.compute.manager [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Received event network-changed-4ecd50b7-00e3-440a-be4d-449297ea59f4 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1387.512700] env[61356]: DEBUG nova.compute.manager [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Refreshing instance network info cache due to event network-changed-4ecd50b7-00e3-440a-be4d-449297ea59f4. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1387.512923] env[61356]: DEBUG oslo_concurrency.lockutils [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] Acquiring lock "refresh_cache-f8e4391b-3224-42a7-9962-eaaf2230d70d" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.513062] env[61356]: DEBUG oslo_concurrency.lockutils [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] Acquired lock "refresh_cache-f8e4391b-3224-42a7-9962-eaaf2230d70d" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.513229] env[61356]: DEBUG nova.network.neutron [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Refreshing network info cache for port 4ecd50b7-00e3-440a-be4d-449297ea59f4 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1387.859520] env[61356]: DEBUG nova.network.neutron [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Updated VIF entry in instance network info cache for port 4ecd50b7-00e3-440a-be4d-449297ea59f4. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1387.859899] env[61356]: DEBUG nova.network.neutron [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Updating instance_info_cache with network_info: [{"id": "4ecd50b7-00e3-440a-be4d-449297ea59f4", "address": "fa:16:3e:3f:df:13", "network": {"id": "e75ff3bf-6807-49a7-ba25-5c6a18efcedf", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1386871659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "012c8be88c84442583defbc9f87174fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ecd50b7-00", "ovs_interfaceid": "4ecd50b7-00e3-440a-be4d-449297ea59f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.870024] env[61356]: DEBUG oslo_concurrency.lockutils [req-15b00e58-8be9-4e4c-9887-8cf650f65acd req-bbdde71d-c9f7-4622-9696-7f49ef3e3203 service nova] Releasing lock "refresh_cache-f8e4391b-3224-42a7-9962-eaaf2230d70d" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.024974] env[61356]: DEBUG oslo_concurrency.lockutils [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] 
Acquiring lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.847148] env[61356]: DEBUG oslo_concurrency.lockutils [None req-be4f8ff9-6822-4cf7-97fd-34bedac59cf2 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.847374] env[61356]: DEBUG oslo_concurrency.lockutils [None req-be4f8ff9-6822-4cf7-97fd-34bedac59cf2 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.102189] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1279fcb8-504e-4f88-a854-a11f6c5e341b tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "aa69cd0e-f97a-4c63-8887-9998847329ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.102447] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1279fcb8-504e-4f88-a854-a11f6c5e341b tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "aa69cd0e-f97a-4c63-8887-9998847329ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.758666] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.759028] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1405.767029] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1405.777140] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.777363] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" 
:: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.777531] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.777699] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1405.778897] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbe2de7-3963-4fb7-ac9a-c4ae0f54adee {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.787812] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b47e0a9-89ed-4a2f-a740-0704ad0d3c64 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.801625] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e05503-b40b-44d8-b055-efd30fa5a461 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.808522] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cf83d8-36ed-4dbd-8a75-29304c27b652 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.836772] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180647MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1405.837020] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.837220] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.916611] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1353d492-e9f0-4495-83e1-6832bb03d5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.916704] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.916844] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.916895] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.917054] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.917234] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.917332] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.917725] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.917884] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.918019] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1405.932695] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.943703] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance e4d2004e-9c64-44e7-b510-895ae532e546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.954820] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.966024] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.976750] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.988346] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.998988] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1406.009885] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance aa69cd0e-f97a-4c63-8887-9998847329ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1406.010130] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1406.010285] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1406.235647] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbebb2d-a26a-4edf-8743-ab72664a9fc2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.243436] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc92d5dd-d62f-4522-8a5d-c8d8ffa25f06 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.273416] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c55ca3-3b3f-4043-afeb-86388cdb6dab {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.279980] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d58a176-76d9-4c9b-91a7-660d991391c7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.292249] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.301160] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1406.314384] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1406.314559] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.477s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.306610] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.754503] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.758044] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.758150] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1407.771065] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] There are 0 instances to clean {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1408.771675] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.772063] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.772127] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.758808] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1410.758808] env[61356]: DEBUG nova.compute.manager [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1413.758709] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.758995] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1413.758995] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1413.782398] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.782559] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.782687] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.782811] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.782934] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.783098] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.783219] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.783348] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.783464] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.783581] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1413.783698] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1419.759376] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1419.759734] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances with incomplete migration {{(pid=61356) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1430.889843] env[61356]: WARNING oslo_vmware.rw_handles [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1430.889843] env[61356]: ERROR oslo_vmware.rw_handles [ 1430.890420] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1430.892246] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1430.892495] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Copying Virtual Disk [datastore2] vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/01577ae1-0363-438f-bf1e-99052500a57f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1430.892787] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe36a5a4-eff1-4495-be88-ffc1a8bba81e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.900796] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 1430.900796] env[61356]: value = "task-4302609" [ 1430.900796] env[61356]: _type = "Task" [ 1430.900796] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.908592] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': task-4302609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.412080] env[61356]: DEBUG oslo_vmware.exceptions [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1431.412080] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.412747] env[61356]: ERROR nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1431.412747] env[61356]: Faults: ['InvalidArgument'] [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Traceback (most recent call last): [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] yield resources [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self.driver.spawn(context, instance, image_meta, [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self._fetch_image_if_missing(context, vi) [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] image_cache(vi, tmp_image_ds_loc) [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] vm_util.copy_virtual_disk( [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] session._wait_for_task(vmdk_copy_task) [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] return self.wait_for_task(task_ref) [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] return evt.wait() [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] result = hub.switch() [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] return self.greenlet.switch() [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self.f(*self.args, **self.kw) [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] raise exceptions.translate_fault(task_info.error) [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Faults: ['InvalidArgument'] [ 1431.412747] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] [ 1431.413756] env[61356]: INFO nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Terminating instance [ 1431.414634] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.414843] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.415104] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac37c0fd-910a-463b-86ca-90dc0898c03b {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.417690] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1431.417884] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1431.418721] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd68cf7b-fc55-4e81-8268-1ada620301af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.425730] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1431.425955] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5de9408-4637-414d-8790-639434c83a8e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.428152] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.428330] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1431.429303] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dd7c8c8-ad71-43be-9089-83d7a7fa22ac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.434018] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Waiting for the task: (returnval){ [ 1431.434018] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f8e1df-4906-1b54-a3d2-3bcbb9e10d17" [ 1431.434018] env[61356]: _type = "Task" [ 1431.434018] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.441434] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52f8e1df-4906-1b54-a3d2-3bcbb9e10d17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.503269] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1431.503485] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1431.504008] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Deleting the datastore file [datastore2] 1353d492-e9f0-4495-83e1-6832bb03d5db {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.504008] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-652daeba-906f-43cd-9171-1b37bb646f16 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.510676] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for the task: (returnval){ [ 1431.510676] env[61356]: value = "task-4302611" [ 1431.510676] env[61356]: _type = "Task" [ 1431.510676] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.518560] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': task-4302611, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.944486] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1431.944838] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Creating directory with path [datastore2] vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1431.944922] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-606f2151-9e1c-426c-939b-feb9b618b0dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.956279] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Created directory with path [datastore2] vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1431.956520] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Fetch image to [datastore2] vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1431.956734] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1431.957490] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4261b0d8-0502-4a0b-bd97-dfcaf6757f3b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.963794] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02894bb-b776-4735-8dd5-db167988cd47 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.972675] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b503e404-855c-4971-945a-e7c4b5fcfa25 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.003570] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13c06cc-fa12-4c24-a4ca-a2b08dc3f33d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.008987] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f67b67ee-fb69-4876-a8f0-458c08d61e22 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.018802] env[61356]: DEBUG oslo_vmware.api [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Task: {'id': task-4302611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071719} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.019111] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.019355] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1432.019577] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1432.019802] env[61356]: INFO nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Took 0.60 seconds to destroy the instance on the hypervisor. 
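
The records above show oslo.vmware's task idiom end to end: a *_Task method is invoked, the returned task is polled ("Waiting for the task ... to complete", "progress is 0%"), and a failed task surfaces as a VimFaultException carrying the server-side fault names (the CopyVirtualDisk_Task above reported ['InvalidArgument'] for the bad fileType). Below is a minimal sketch of that idiom using only the public oslo.vmware API; the helper name and placeholder arguments are illustrative, not Nova's actual code.

from oslo_vmware import api
from oslo_vmware import exceptions as vexc

def delete_datastore_file(session, ds_path, dc_ref):
    """Start a DeleteDatastoreFile_Task and wait for it to finish."""
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    try:
        # Polls the task until it succeeds or errors -- the loop behind the
        # "progress is 0%" lines in the log above.
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        # fault_list carries the vSphere fault class names, e.g.
        # ['InvalidArgument'] in the failure logged above.
        print('task failed with faults: %s' % e.fault_list)
        raise

# Usage against a live vCenter (placeholder credentials; dc_ref must be a
# Datacenter managed-object reference obtained from the same session):
# session = api.VMwareAPISession('vc.example.org', 'user', 'secret', 10, 0.5)
# delete_datastore_file(session, '[datastore2] vmware_temp/old.vmdk', dc_ref)
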
[ 1432.022140] env[61356]: DEBUG nova.compute.claims [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1432.022323] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.022548] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.035341] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1432.114288] env[61356]: DEBUG oslo_vmware.rw_handles [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1432.180289] env[61356]: DEBUG oslo_vmware.rw_handles [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1432.180289] env[61356]: DEBUG oslo_vmware.rw_handles [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1432.410575] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f1c1ec-021f-4382-8d5d-3c064269c9e6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.418681] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96239d01-75b4-493e-b279-4a2cf9e13aea {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.452066] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b6e234-90f2-4c7f-b95a-358ec6aa46f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.463877] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d53c55-8b6e-4e25-917f-c33346a0dda2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.476939] env[61356]: DEBUG nova.compute.provider_tree [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.489149] env[61356]: DEBUG nova.scheduler.client.report [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1432.503914] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.481s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.504477] env[61356]: ERROR nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1432.504477] env[61356]: Faults: ['InvalidArgument'] [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Traceback (most recent call last): [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 
1353d492-e9f0-4495-83e1-6832bb03d5db] self.driver.spawn(context, instance, image_meta, [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self._fetch_image_if_missing(context, vi) [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] image_cache(vi, tmp_image_ds_loc) [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] vm_util.copy_virtual_disk( [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] session._wait_for_task(vmdk_copy_task) [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] return self.wait_for_task(task_ref) [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] return evt.wait() [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] result = hub.switch() [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] return self.greenlet.switch() [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] self.f(*self.args, **self.kw) [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] raise exceptions.translate_fault(task_info.error) [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Faults: ['InvalidArgument'] [ 1432.504477] env[61356]: ERROR nova.compute.manager [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] [ 1432.505341] env[61356]: DEBUG nova.compute.utils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1432.509736] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Build of instance 1353d492-e9f0-4495-83e1-6832bb03d5db was re-scheduled: A specified parameter was not correct: fileType [ 1432.509736] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1432.510152] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1432.510330] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1432.510501] env[61356]: DEBUG nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1432.510662] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1432.828470] env[61356]: DEBUG nova.network.neutron [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.849402] env[61356]: INFO nova.compute.manager [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Took 0.34 seconds to deallocate network for instance. [ 1433.001832] env[61356]: INFO nova.scheduler.client.report [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Deleted allocations for instance 1353d492-e9f0-4495-83e1-6832bb03d5db [ 1433.026226] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7d21cd4c-19c9-4ed6-ac52-5940d581f9d1 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 633.181s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.027485] env[61356]: DEBUG oslo_concurrency.lockutils [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 435.783s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.027683] env[61356]: DEBUG oslo_concurrency.lockutils [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Acquiring lock "1353d492-e9f0-4495-83e1-6832bb03d5db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.027908] env[61356]: DEBUG oslo_concurrency.lockutils [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1433.028088] env[61356]:
DEBUG oslo_concurrency.lockutils [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.032857] env[61356]: INFO nova.compute.manager [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Terminating instance [ 1433.034848] env[61356]: DEBUG nova.compute.manager [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1433.034958] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1433.035264] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5e5618b-5716-4a69-a53a-f387ce9eb999 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.044410] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b2a69d-b238-4792-9f71-a4e710ea7edd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.055708] env[61356]: DEBUG nova.compute.manager [None req-46d4c536-c4cb-4ba6-89e3-934787724c63 tempest-ServerShowV257Test-376094274 tempest-ServerShowV257Test-376094274-project-member] [instance: b23c7c3d-86d9-4b3c-b121-099833985d7e] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1433.076935] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1353d492-e9f0-4495-83e1-6832bb03d5db could not be found. [ 1433.077180] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1433.077362] env[61356]: INFO nova.compute.manager [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1433.077626] env[61356]: DEBUG oslo.service.loopingcall [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1433.077856] env[61356]: DEBUG nova.compute.manager [-] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1433.077950] env[61356]: DEBUG nova.network.neutron [-] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1433.088919] env[61356]: DEBUG nova.compute.manager [None req-46d4c536-c4cb-4ba6-89e3-934787724c63 tempest-ServerShowV257Test-376094274 tempest-ServerShowV257Test-376094274-project-member] [instance: b23c7c3d-86d9-4b3c-b121-099833985d7e] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1433.108240] env[61356]: DEBUG nova.network.neutron [-] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1433.113265] env[61356]: DEBUG oslo_concurrency.lockutils [None req-46d4c536-c4cb-4ba6-89e3-934787724c63 tempest-ServerShowV257Test-376094274 tempest-ServerShowV257Test-376094274-project-member] Lock "b23c7c3d-86d9-4b3c-b121-099833985d7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.666s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1433.117883] env[61356]: INFO nova.compute.manager [-] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] Took 0.04 seconds to deallocate network for instance.
[ 1433.127572] env[61356]: DEBUG nova.compute.manager [None req-ef8f33af-6ae8-4bac-8cc4-f14f9a8b10ef tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 15e801d1-823a-4a9c-876a-c60fa6c686f6] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1433.152849] env[61356]: DEBUG nova.compute.manager [None req-ef8f33af-6ae8-4bac-8cc4-f14f9a8b10ef tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 15e801d1-823a-4a9c-876a-c60fa6c686f6] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1433.187155] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ef8f33af-6ae8-4bac-8cc4-f14f9a8b10ef tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "15e801d1-823a-4a9c-876a-c60fa6c686f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.903s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1433.198023] env[61356]: DEBUG nova.compute.manager [None req-ea2cdbb9-7937-400a-a46a-b0c52678ce3d tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] [instance: f7170bb5-0399-44d5-adf0-6eca96e5f7b3] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1433.224133] env[61356]: DEBUG nova.compute.manager [None req-ea2cdbb9-7937-400a-a46a-b0c52678ce3d tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] [instance: f7170bb5-0399-44d5-adf0-6eca96e5f7b3] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}}
[ 1433.241616] env[61356]: DEBUG oslo_concurrency.lockutils [None req-380af23a-7548-4939-a0e8-1014a711b581 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.214s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1433.242867] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 314.202s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1433.243396] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 1353d492-e9f0-4495-83e1-6832bb03d5db] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1433.243629] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "1353d492-e9f0-4495-83e1-6832bb03d5db" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1433.254758] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ea2cdbb9-7937-400a-a46a-b0c52678ce3d tempest-AttachVolumeTestJSON-1846891283 tempest-AttachVolumeTestJSON-1846891283-project-member] Lock "f7170bb5-0399-44d5-adf0-6eca96e5f7b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.534s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1433.265077] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1433.327307] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1433.327701] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1433.330828] env[61356]: INFO nova.compute.claims [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1433.605137] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecdc2ae-f32a-409d-aa95-337cd10d2c86 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1433.613689] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b27be8-812a-472d-be6c-795f35c791de {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1433.643160] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b37c3fe-2872-47da-8338-521a8a321a47 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1433.650846] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627c1ae9-20e5-4c64-8f2b-d85bfa0be088 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1433.664211] env[61356]: DEBUG nova.compute.provider_tree [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1433.676442] env[61356]: DEBUG nova.scheduler.client.report [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1433.691717] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.364s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1433.692326] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1433.727114] env[61356]: DEBUG nova.compute.utils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1433.728930] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1433.729202] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1433.739163] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1433.826906] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1433.852930] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1433.853189] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1433.853349] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1433.853536] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1433.853679] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1433.853825] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1433.854136] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1433.854366] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1433.854595] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1433.854818] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1433.855066] env[61356]: DEBUG nova.virt.hardware [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1433.855988] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300c5776-8da5-4bba-b8f0-2c4b027dd98f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1433.861373] env[61356]: DEBUG nova.policy [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '71c2c7e9099b428eaac2b97bb3e73b79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb250b900f32424fb8422cf75824d0d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1433.866792] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97874f55-2a74-4c40-9ceb-4a1b3a3b6c9d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1434.315938] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Successfully created port: 914f2066-eafd-4744-abb3-47e0e60e172f {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1435.501870] env[61356]: DEBUG nova.compute.manager [req-ae55a8c4-b50e-4dc3-bcc5-0242c39cd5ca req-3a7274a4-07e7-4abf-95a5-3a661adb099d service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Received event network-vif-plugged-914f2066-eafd-4744-abb3-47e0e60e172f {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1435.501870] env[61356]: DEBUG oslo_concurrency.lockutils [req-ae55a8c4-b50e-4dc3-bcc5-0242c39cd5ca req-3a7274a4-07e7-4abf-95a5-3a661adb099d service nova] Acquiring lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1435.501870] env[61356]: DEBUG oslo_concurrency.lockutils [req-ae55a8c4-b50e-4dc3-bcc5-0242c39cd5ca req-3a7274a4-07e7-4abf-95a5-3a661adb099d service nova] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1435.501870] env[61356]: DEBUG oslo_concurrency.lockutils [req-ae55a8c4-b50e-4dc3-bcc5-0242c39cd5ca req-3a7274a4-07e7-4abf-95a5-3a661adb099d service nova] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1435.501870] env[61356]: DEBUG nova.compute.manager [req-ae55a8c4-b50e-4dc3-bcc5-0242c39cd5ca req-3a7274a4-07e7-4abf-95a5-3a661adb099d service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] No waiting events found dispatching network-vif-plugged-914f2066-eafd-4744-abb3-47e0e60e172f {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1435.501870] env[61356]: WARNING nova.compute.manager [req-ae55a8c4-b50e-4dc3-bcc5-0242c39cd5ca req-3a7274a4-07e7-4abf-95a5-3a661adb099d service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Received unexpected event network-vif-plugged-914f2066-eafd-4744-abb3-47e0e60e172f for instance with vm_state building and task_state spawning.
[ 1435.578466] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Successfully updated port: 914f2066-eafd-4744-abb3-47e0e60e172f {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1435.592682] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "refresh_cache-3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1435.592830] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquired lock "refresh_cache-3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1435.594127] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1435.659811] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1435.945396] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Updating instance_info_cache with network_info: [{"id": "914f2066-eafd-4744-abb3-47e0e60e172f", "address": "fa:16:3e:6a:4f:90", "network": {"id": "c09a182d-1c2b-468c-97c8-2484556cdb67", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2075268749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb250b900f32424fb8422cf75824d0d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap914f2066-ea", "ovs_interfaceid": "914f2066-eafd-4744-abb3-47e0e60e172f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1435.958140] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Releasing lock "refresh_cache-3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1435.958140] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance network_info: |[{"id": "914f2066-eafd-4744-abb3-47e0e60e172f", "address": "fa:16:3e:6a:4f:90", "network": {"id": "c09a182d-1c2b-468c-97c8-2484556cdb67", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2075268749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb250b900f32424fb8422cf75824d0d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap914f2066-ea", "ovs_interfaceid": "914f2066-eafd-4744-abb3-47e0e60e172f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1435.958521] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:4f:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '914f2066-eafd-4744-abb3-47e0e60e172f', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1435.966455] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Creating folder: Project (eb250b900f32424fb8422cf75824d0d3). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1435.967065] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32cfd6ae-c924-43a2-8e7a-ee34ac1a4943 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1435.979315] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Created folder: Project (eb250b900f32424fb8422cf75824d0d3) in parent group-v846926.
[ 1435.979506] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Creating folder: Instances. Parent ref: group-v847015. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1435.979740] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2032ee9f-909f-4c44-af68-90ad94794802 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1435.988318] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Created folder: Instances in parent group-v847015.
[ 1435.988420] env[61356]: DEBUG oslo.service.loopingcall [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1435.988556] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1435.988759] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49194c38-6780-4f63-aa84-c5072b948c2f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1436.010625] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1436.010625] env[61356]: value = "task-4302614"
[ 1436.010625] env[61356]: _type = "Task"
[ 1436.010625] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1436.018839] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302614, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1436.522922] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302614, 'name': CreateVM_Task, 'duration_secs': 0.287059} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1436.522922] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1436.523579] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1436.523740] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1436.524082] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1436.524336] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24f2d346-5d93-46b3-852d-bf9ef59e2f90 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1436.530410] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Waiting for the task: (returnval){
[ 1436.530410] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e6c457-eddc-63b3-b92f-8ac6ceb6f09d"
[ 1436.530410] env[61356]: _type = "Task"
[ 1436.530410] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1436.538117] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e6c457-eddc-63b3-b92f-8ac6ceb6f09d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1436.826396] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "4f103733-7586-4241-a766-cf13bef9e435" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1436.826611] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "4f103733-7586-4241-a766-cf13bef9e435" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1437.042429] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1437.042690] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1437.042912] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1437.529675] env[61356]: DEBUG nova.compute.manager [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Received event network-changed-914f2066-eafd-4744-abb3-47e0e60e172f {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1437.530092] env[61356]: DEBUG nova.compute.manager [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Refreshing instance network info cache due to event network-changed-914f2066-eafd-4744-abb3-47e0e60e172f. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
[ 1437.530092] env[61356]: DEBUG oslo_concurrency.lockutils [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] Acquiring lock "refresh_cache-3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1437.530292] env[61356]: DEBUG oslo_concurrency.lockutils [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] Acquired lock "refresh_cache-3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1437.530521] env[61356]: DEBUG nova.network.neutron [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Refreshing network info cache for port 914f2066-eafd-4744-abb3-47e0e60e172f {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1437.995165] env[61356]: DEBUG nova.network.neutron [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Updated VIF entry in instance network info cache for port 914f2066-eafd-4744-abb3-47e0e60e172f. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1437.995524] env[61356]: DEBUG nova.network.neutron [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Updating instance_info_cache with network_info: [{"id": "914f2066-eafd-4744-abb3-47e0e60e172f", "address": "fa:16:3e:6a:4f:90", "network": {"id": "c09a182d-1c2b-468c-97c8-2484556cdb67", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-2075268749-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb250b900f32424fb8422cf75824d0d3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap914f2066-ea", "ovs_interfaceid": "914f2066-eafd-4744-abb3-47e0e60e172f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1438.006980] env[61356]: DEBUG oslo_concurrency.lockutils [req-f2307a54-9dcd-4b2f-8c02-92a7342fc2f3 req-f3ee85b3-4e0d-4989-96b4-e5394b7d1f42 service nova] Releasing lock "refresh_cache-3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1447.506621] env[61356]: DEBUG oslo_concurrency.lockutils [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1464.772345] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1465.757995] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1465.769130] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1465.769343] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1465.769520] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1465.769668] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1465.770789] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31680ef8-64df-4fc0-93b6-8e25cda5a5ea {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1465.779285] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2039eee-1c66-4b44-ade6-7d2c585de5c9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1465.794123] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca38b9b4-6ae4-4088-bdc3-729b8494be36 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1465.800267] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a076f33e-e619-477a-b84e-7cbaa834a971 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1465.829158] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180691MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1465.829368] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1465.829572] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1466.021801] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 498180c6-cc07-4c5a-bf1e-784148909d6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022391] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022391] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022391] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022391] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022592] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022637] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022818] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.022926] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.023011] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1466.035642] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.050223] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.060896] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.073144] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.083262] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.093048] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance aa69cd0e-f97a-4c63-8887-9998847329ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.102150] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1466.102409] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1466.102583] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1466.119756] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing inventories for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}}
[ 1466.134771] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Updating ProviderTree inventory for provider cfb833eb-84f8-480d-866a-1ad2fb871484 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}}
[ 1466.134959] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1466.146126] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing aggregate associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, aggregates: None {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}}
[ 1466.165463] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing trait associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}}
[ 1466.375628] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eedd20d-21ad-46c6-bb5b-109bc05e391d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1466.383227] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab3365b-5136-4c64-9a7c-68fca535413b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1466.412509] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca3233c-86f4-47e9-aff0-4bab523c72be {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1466.419150] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb2c677-6381-4ea4-9526-08bc8a153b76 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1466.431570] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1466.441503] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1466.455633] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1466.455810] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.626s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1468.456250] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1468.758635] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1468.758909] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1469.754686] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1470.757594] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1471.758601] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1471.758888] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 1473.759242] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1473.759572] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 1473.759621] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1473.782008] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782175] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782308] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782453] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782611] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782741] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782861] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.782979] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.783250] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.783430] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1473.783567] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update.
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1477.780018] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.952054] env[61356]: WARNING oslo_vmware.rw_handles [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1477.952054] env[61356]: ERROR oslo_vmware.rw_handles [ 1477.952054] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1477.953859] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1477.954117] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Copying Virtual Disk [datastore2] vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/f9045a32-5c2b-4092-a808-a9627fb0f681/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1477.954403] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-493678de-9e7e-42a8-b2bd-3d6df847790b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.962829] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Waiting for the task: (returnval){ [ 1477.962829] env[61356]: value = "task-4302615" [ 1477.962829] env[61356]: _type = "Task" [ 1477.962829] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.970430] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Task: {'id': task-4302615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.472357] env[61356]: DEBUG oslo_vmware.exceptions [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1478.472685] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.473260] env[61356]: ERROR nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1478.473260] env[61356]: Faults: ['InvalidArgument'] [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Traceback (most recent call last): [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] yield resources [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self.driver.spawn(context, instance, image_meta, [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self._fetch_image_if_missing(context, vi) [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] image_cache(vi, tmp_image_ds_loc) [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] vm_util.copy_virtual_disk( [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] session._wait_for_task(vmdk_copy_task) [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] return self.wait_for_task(task_ref) [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] return evt.wait() [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] result = hub.switch() [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] return self.greenlet.switch() [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self.f(*self.args, **self.kw) [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] raise exceptions.translate_fault(task_info.error) [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1478.473260] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Faults: ['InvalidArgument'] [ 1478.473260] 
env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] [ 1478.474118] env[61356]: INFO nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Terminating instance [ 1478.475100] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.475312] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1478.475564] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8e1aebb-4568-4496-951f-b4c05292241c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.477637] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1478.477824] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1478.478553] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6badf97d-f27e-44dd-8705-b77d8d606a43 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.486293] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1478.487288] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5cea5f6-17de-40b9-b381-b91911d48d91 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.488604] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1478.488784] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 
tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1478.489450] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea93fc98-be77-4a3c-8844-1f73c4bde9dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.494092] env[61356]: DEBUG oslo_vmware.api [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){ [ 1478.494092] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52858852-274f-c644-0305-a34307007d6a" [ 1478.494092] env[61356]: _type = "Task" [ 1478.494092] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.500911] env[61356]: DEBUG oslo_vmware.api [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52858852-274f-c644-0305-a34307007d6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.564991] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1478.565243] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1478.565421] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Deleting the datastore file [datastore2] 498180c6-cc07-4c5a-bf1e-784148909d6a {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.565708] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e123f2d0-3ea8-4b41-bf45-6b5dd300a7e1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.571919] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Waiting for the task: (returnval){ [ 1478.571919] env[61356]: value = "task-4302617" [ 1478.571919] env[61356]: _type = "Task" [ 1478.571919] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.579466] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Task: {'id': task-4302617, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.004116] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1479.004427] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Creating directory with path [datastore2] vmware_temp/f5029439-95b7-48e1-a229-8b7eed60a4c9/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.004608] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0afbdf93-803e-4ac5-94c6-8e44be9576af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.015387] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Created directory with path [datastore2] vmware_temp/f5029439-95b7-48e1-a229-8b7eed60a4c9/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.015576] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Fetch image to [datastore2] vmware_temp/f5029439-95b7-48e1-a229-8b7eed60a4c9/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1479.015747] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/f5029439-95b7-48e1-a229-8b7eed60a4c9/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1479.016457] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05e48ad-95e1-43dc-9cd0-36e0c1bedefb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.022847] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f9f7ed-5675-455f-9adf-ae625aa502a1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.031403] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764c61e8-4b26-4fbf-9d38-3c4aa0cc9642 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.060418] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e65d43-7521-4d9b-8f0d-22afe9b42c77 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.065699] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-23f3ee00-566f-4e2e-9320-f487b8651dcf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.079975] env[61356]: DEBUG oslo_vmware.api [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Task: {'id': task-4302617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066609} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.080221] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.080493] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1479.080695] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1479.080869] env[61356]: INFO nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Took 0.60 seconds to destroy the instance on the hypervisor. 
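
The sequence above is worth unpacking: the image stream to vmware_temp/.../tmp-sparse.vmdk was cut off (RemoteDisconnected), the follow-up CopyVirtualDisk_Task was then rejected by vCenter with an InvalidArgument fault on fileType, and the driver responded by unregistering the half-built VM and deleting its datastore directory. Below is a minimal sketch of how such a task fault surfaces through oslo.vmware — assuming a VMwareAPISession like the one in this log; the helper itself is hypothetical, not Nova's code:

    from oslo_vmware import exceptions as vexc

    def copy_disk_logging_faults(session, copy_task):
        # Hypothetical helper: wait on a vCenter task the way
        # vm_util.copy_virtual_disk does above, and surface the fault
        # names the way the traceback shows them.
        try:
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as e:
            # e.fault_list carries the raw fault class names, e.g.
            # ['InvalidArgument']; str(e) carries vCenter's message
            # ("A specified parameter was not correct: fileType").
            print('vCenter task failed: %s (faults: %s)' % (e, e.fault_list))
            raise

This matches the polling path visible in the log: _poll_task raises exceptions.translate_fault(task_info.error) when the task ends in error, and wait_for_task propagates it to the compute manager, which aborts the claim and reschedules the build.
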
[ 1479.083161] env[61356]: DEBUG nova.compute.claims [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1479.083330] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.083541] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.087920] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1479.277137] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.277925] env[61356]: ERROR nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
[ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = getattr(controller, method)(*args, **kwargs) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._get(image_id) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] resp, body = self.http_client.get(url, headers=header) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.request(url, 'GET', **kwargs) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._handle_response(resp) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exc.from_response(resp, resp.content) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] yield resources [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self.driver.spawn(context, instance, image_meta, [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._fetch_image_if_missing(context, vi) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image_fetch(context, vi, tmp_image_ds_loc) [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] images.fetch_image( [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1479.277925] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] metadata = IMAGE_API.get(context, image_ref) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return session.show(context, image_id, [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] _reraise_translated_image_exception(image_id) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise new_exc.with_traceback(exc_trace) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = getattr(controller, method)(*args, **kwargs) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._get(image_id) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] resp, body = self.http_client.get(url, headers=header) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.request(url, 'GET', **kwargs) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._handle_response(resp) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exc.from_response(resp, resp.content) [ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
[ 1479.279102] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1479.279102] env[61356]: INFO nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Terminating instance [ 1479.279880] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.279916] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.280523] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1479.280711] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1479.282809] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b98fe549-e086-42b3-acd8-8ff90fa5f889 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.285415] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e59595c-dbaf-4c1a-a6bf-4d3953e2b147 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.292169] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1479.292391] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5faa2be1-255b-4008-9c29-4b52324f2f0f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.294463] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.294634] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 
tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1479.295535] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-153216c0-ed53-4b0d-8aa1-def0172862b8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.302088] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Waiting for the task: (returnval){ [ 1479.302088] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5267c480-a6d4-88d8-5455-e79bcc647db5" [ 1479.302088] env[61356]: _type = "Task" [ 1479.302088] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.308778] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5267c480-a6d4-88d8-5455-e79bcc647db5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.340702] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd4ddb0-b7a3-4342-9444-db9b6ca94928 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.347548] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588d387d-7f0a-45f4-9fb5-57a1f2b6db78 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.375933] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b4a06c-159e-4956-9abd-a4e04d4e3682 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.382774] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3a5217-81f6-40c3-8e74-2e6a83a4dace {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.387243] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1479.387444] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1479.387618] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 
tempest-ServersTestMultiNic-1268258457-project-member] Deleting the datastore file [datastore2] 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1479.388141] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e00e8134-07f8-4712-842e-140a6667db5c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.397180] env[61356]: DEBUG nova.compute.provider_tree [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.402534] env[61356]: DEBUG oslo_vmware.api [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for the task: (returnval){ [ 1479.402534] env[61356]: value = "task-4302619" [ 1479.402534] env[61356]: _type = "Task" [ 1479.402534] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.406217] env[61356]: DEBUG nova.scheduler.client.report [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1479.412065] env[61356]: DEBUG oslo_vmware.api [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': task-4302619, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.425657] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.342s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.425948] env[61356]: ERROR nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1479.425948] env[61356]: Faults: ['InvalidArgument'] [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Traceback (most recent call last): [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self.driver.spawn(context, instance, image_meta, [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self._fetch_image_if_missing(context, vi) [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] image_cache(vi, tmp_image_ds_loc) [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] vm_util.copy_virtual_disk( [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] session._wait_for_task(vmdk_copy_task) [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] return self.wait_for_task(task_ref) [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] return evt.wait() [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] result = hub.switch() [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] return self.greenlet.switch() [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] self.f(*self.args, **self.kw) [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] raise exceptions.translate_fault(task_info.error) [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Faults: ['InvalidArgument'] [ 1479.425948] env[61356]: ERROR nova.compute.manager [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] [ 1479.427022] env[61356]: DEBUG nova.compute.utils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1479.428426] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Build of instance 498180c6-cc07-4c5a-bf1e-784148909d6a was re-scheduled: A specified parameter was not correct: fileType [ 1479.428426] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1479.428789] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1479.428963] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Virt driver does not provide unplug_vifs method, so it is not 
possible to determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1479.429157] env[61356]: DEBUG nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1479.429318] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1479.815467] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1479.815843] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Creating directory with path [datastore2] vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.816180] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8de8615-9eba-458d-a98f-311c16607d04 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.828114] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Created directory with path [datastore2] vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.828324] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Fetch image to [datastore2] vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1479.828493] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1479.829238] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-6f4b746f-8afe-4734-81e0-30e24674249a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.836045] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6056aabe-bc58-4e44-9825-56c63a018a0b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.845162] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b9d4de-5d9e-47f9-a4a3-e92b98e37a1e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.875304] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8e8bab-37d3-4812-abbb-0d270726dc39 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.880620] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-99629312-5b28-43d9-a727-3a3cdd2b62bb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.901684] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1479.914330] env[61356]: DEBUG oslo_vmware.api [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Task: {'id': task-4302619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086224} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.914535] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.914720] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1479.914891] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1479.915411] env[61356]: INFO nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Took 0.63 seconds to destroy the instance on the hypervisor. 
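The VimFaultException traceback above and the DeleteDatastoreFile_Task completion just logged both go through the same oslo.vmware task-wait path: wait_for_task() polls the vSphere task (the _poll_task loop cited throughout this log) and, when the task ends in an error state, translates task_info.error into a Python exception carrying the VIM fault names. A minimal sketch of that pattern, assuming an existing oslo_vmware.api.VMwareAPISession named session; the helper name wait_for_copy is illustrative, not from the log:

    import logging

    from oslo_vmware import exceptions as vexc

    LOG = logging.getLogger(__name__)

    def wait_for_copy(session, vmdk_copy_task):
        # Blocks until the task reaches 'success' (returning task_info)
        # or 'error' (raising the translated exception).
        try:
            return session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # For the CopyVirtualDisk_Task above, e.fault_list is
            # ['InvalidArgument'] and the message names the rejected
            # parameter: "A specified parameter was not correct: fileType".
            LOG.error("vSphere task failed: %s (faults: %s)", e, e.fault_list)
            raise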
[ 1479.918438] env[61356]: DEBUG nova.compute.claims [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1479.918657] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.919100] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.968763] env[61356]: DEBUG oslo_vmware.rw_handles [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1480.031432] env[61356]: DEBUG oslo_vmware.rw_handles [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1480.031778] env[61356]: DEBUG oslo_vmware.rw_handles [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1480.196638] env[61356]: DEBUG nova.network.neutron [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.210976] env[61356]: INFO nova.compute.manager [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Took 0.78 seconds to deallocate network for instance. 
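Each "compute_resources" Acquiring/acquired/released triple in this log is emitted by oslo.concurrency around a synchronized resource-tracker method; "waited" measures time queueing for the lock and "held" the runtime of the decorated call. A minimal sketch of the decorator pattern that produces these lines; the class name and empty body are illustrative, and fair=True is an assumption about how the lock is configured:

    from oslo_concurrency import lockutils

    class TrackerSketch(object):
        @lockutils.synchronized('compute_resources', fair=True)  # fair=True assumed
        def abort_instance_claim(self, context, instance, nodename):
            # lockutils logs 'Acquiring lock "compute_resources" by ...',
            # then 'acquired ... waited N.NNNs' once the lock is held, and
            # 'released ... held N.NNNs' when this body returns.
            pass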
[ 1480.238871] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c904ab5b-b12b-47c0-a059-05977a61bb26 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.246435] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd77ce9-7ec9-4692-bb1c-a8b42e88c4b6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.279479] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0eae296-e58e-4fd6-9937-87fea00ef498 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.291045] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8256922-dda5-4816-8877-938311ea67b2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.304208] env[61356]: DEBUG nova.compute.provider_tree [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.312550] env[61356]: DEBUG nova.scheduler.client.report [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1480.324680] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.406s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.325408] env[61356]: ERROR nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
[ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = getattr(controller, method)(*args, **kwargs) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._get(image_id) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] resp, body = self.http_client.get(url, headers=header) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.request(url, 'GET', **kwargs) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._handle_response(resp) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exc.from_response(resp, resp.content) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self.driver.spawn(context, instance, image_meta, [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._fetch_image_if_missing(context, vi) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image_fetch(context, vi, tmp_image_ds_loc) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] images.fetch_image( [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] metadata = IMAGE_API.get(context, image_ref) [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1480.325408] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return session.show(context, image_id, [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] _reraise_translated_image_exception(image_id) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise new_exc.with_traceback(exc_trace) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 
85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = getattr(controller, method)(*args, **kwargs) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._get(image_id) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] resp, body = self.http_client.get(url, headers=header) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.request(url, 'GET', **kwargs) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._handle_response(resp) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exc.from_response(resp, resp.content) [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1480.326464] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.326464] env[61356]: DEBUG nova.compute.utils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
{{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1480.327430] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Build of instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 was re-scheduled: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1480.327907] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1480.328384] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1480.328384] env[61356]: DEBUG nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1480.328547] env[61356]: DEBUG nova.network.neutron [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1480.336243] env[61356]: INFO nova.scheduler.client.report [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Deleted allocations for instance 498180c6-cc07-4c5a-bf1e-784148909d6a [ 1480.355390] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ab6a1f7b-5351-4cdc-a8d0-0f952c65c010 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 626.136s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.356577] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 429.957s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.356808] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Acquiring lock
"498180c6-cc07-4c5a-bf1e-784148909d6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.357015] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.357183] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.359202] env[61356]: INFO nova.compute.manager [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Terminating instance [ 1480.360843] env[61356]: DEBUG nova.compute.manager [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1480.361049] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1480.361574] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0131b95f-1ef6-4ef9-8728-0a20eb41321c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.369973] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45cbe76-d9ac-4a4c-bc26-a36c8dc4e195 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.380437] env[61356]: DEBUG nova.compute.manager [None req-b45f01f9-4dbd-4825-92c9-ab48d2739f26 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: e4d2004e-9c64-44e7-b510-895ae532e546] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.400861] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 498180c6-cc07-4c5a-bf1e-784148909d6a could not be found. 
[ 1480.401083] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1480.401260] env[61356]: INFO nova.compute.manager [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1480.401536] env[61356]: DEBUG oslo.service.loopingcall [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.401745] env[61356]: DEBUG nova.compute.manager [-] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1480.401841] env[61356]: DEBUG nova.network.neutron [-] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1480.405711] env[61356]: DEBUG nova.compute.manager [None req-b45f01f9-4dbd-4825-92c9-ab48d2739f26 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] [instance: e4d2004e-9c64-44e7-b510-895ae532e546] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1480.428024] env[61356]: DEBUG neutronclient.v2_0.client [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1480.428024] env[61356]: ERROR nova.compute.manager [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized.
[ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = getattr(controller, method)(*args, **kwargs) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._get(image_id) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] resp, body = self.http_client.get(url, headers=header) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.request(url, 'GET', **kwargs) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._handle_response(resp) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exc.from_response(resp, resp.content) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self.driver.spawn(context, instance, image_meta, [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._fetch_image_if_missing(context, vi) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image_fetch(context, vi, tmp_image_ds_loc) [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] images.fetch_image( [ 1480.428024] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] metadata = IMAGE_API.get(context, image_ref) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return session.show(context, image_id, [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] _reraise_translated_image_exception(image_id) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise new_exc.with_traceback(exc_trace) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 
85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = getattr(controller, method)(*args, **kwargs) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._get(image_id) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] resp, body = self.http_client.get(url, headers=header) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.request(url, 'GET', **kwargs) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self._handle_response(resp) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exc.from_response(resp, resp.content) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
[ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._build_and_run_instance(context, instance, image, [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exception.RescheduledException( [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] nova.exception.RescheduledException: Build of instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 was re-scheduled: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.429062] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] exception_handler_v20(status_code, error_body) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise client_exc(message=error_message, [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Neutron server returns request_ids: ['req-79e9b273-5400-41eb-bc6c-18ec681896e1'] [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 
85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._deallocate_network(context, instance, requested_networks) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self.network_api.deallocate_for_instance( [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] data = neutron.list_ports(**search_opts) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.list('ports', self.ports_path, retrieve_all, [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] for r in self._pagination(collection, path, **params): [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] res = self.get(path, params=params) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 
85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.retry_request("GET", action, body=body, [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.do_request(method, action, body=body, [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._handle_fault_response(status_code, replybody, resp) [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1480.430135] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exception.Unauthorized() [ 1480.431821] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] nova.exception.Unauthorized: Not authorized. [ 1480.431821] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.431821] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b45f01f9-4dbd-4825-92c9-ab48d2739f26 tempest-AttachInterfacesTestJSON-473518452 tempest-AttachInterfacesTestJSON-473518452-project-member] Lock "e4d2004e-9c64-44e7-b510-895ae532e546" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 226.154s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.440940] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.449277] env[61356]: DEBUG nova.network.neutron [-] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.464721] env[61356]: INFO nova.compute.manager [-] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] Took 0.06 seconds to deallocate network for instance.
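The chained tracebacks above show the translation the quoted nova/image/glance.py frames perform: the raw glanceclient HTTPUnauthorized is caught in show() and re-raised as the Nova-level ImageNotAuthorized with the original traceback attached, which is why both exception types appear in the same dump. A minimal sketch of that translation, simplified from the call pattern the traceback itself quotes; the function name show_translated is illustrative:

    import sys

    import glanceclient.exc
    from nova import exception

    def show_translated(client, context, image_id):
        try:
            return client.call(context, 2, 'get', args=(image_id,))
        except Exception:
            exc_type, exc_value, exc_trace = sys.exc_info()
            # Surface a Nova-level exception but keep the glanceclient
            # frames, mirroring _reraise_translated_image_exception().
            if isinstance(exc_value, glanceclient.exc.HTTPUnauthorized):
                new_exc = exception.ImageNotAuthorized(image_id=image_id)
                raise new_exc.with_traceback(exc_trace)
            raise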
[ 1480.501856] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.502127] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.503828] env[61356]: INFO nova.compute.claims [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.508102] env[61356]: INFO nova.scheduler.client.report [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Deleted allocations for instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 [ 1480.544799] env[61356]: DEBUG oslo_concurrency.lockutils [None req-954880b9-cc62-47da-8a4a-e208ebba2237 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 576.263s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.546160] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 379.658s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.546413] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Acquiring lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.546650] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.546855] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282-events" "released" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.548916] env[61356]: INFO nova.compute.manager [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Terminating instance [ 1480.550614] env[61356]: DEBUG nova.compute.manager [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1480.550805] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1480.551664] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-044c0924-0ca3-40ab-9cd3-881f1d3d4e72 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.563842] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a60e8f-dc78-4145-9435-9751d69bcffd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.577278] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.584360] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5e32e189-ead3-475f-97fb-f53313e33163 tempest-ServerRescueTestJSONUnderV235-959599176 tempest-ServerRescueTestJSONUnderV235-959599176-project-member] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.227s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.585344] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 361.544s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.585344] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 498180c6-cc07-4c5a-bf1e-784148909d6a] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1480.585344] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "498180c6-cc07-4c5a-bf1e-784148909d6a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.599175] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282 could not be found. [ 1480.599405] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1480.599587] env[61356]: INFO nova.compute.manager [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1480.599834] env[61356]: DEBUG oslo.service.loopingcall [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.602738] env[61356]: DEBUG nova.compute.manager [-] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1480.602852] env[61356]: DEBUG nova.network.neutron [-] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1480.639626] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.703022] env[61356]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1480.703022] env[61356]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed:
nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-1ebaa196-cd3d-4562-92e1-d3ab4b066fad'] [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.703309] 
env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1480.703309] env[61356]: ERROR oslo.service.loopingcall [ 1480.704797] env[61356]: ERROR nova.compute.manager [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1480.733830] env[61356]: ERROR nova.compute.manager [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] exception_handler_v20(status_code, error_body) [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise client_exc(message=error_message, [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Neutron server returns request_ids: ['req-1ebaa196-cd3d-4562-92e1-d3ab4b066fad'] [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During handling of the above exception, another exception occurred: [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Traceback (most recent call last): [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._delete_instance(context, instance, bdms) [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._shutdown_instance(context, instance, bdms) [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._try_deallocate_network(context, instance, requested_networks) [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] with excutils.save_and_reraise_exception(): [ 1480.733830] env[61356]: ERROR 
nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self.force_reraise() [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise self.value [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] _deallocate_network_with_retries() [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return evt.wait() [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = hub.switch() [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.greenlet.switch() [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = func(*self.args, **self.kw) [ 1480.733830] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] result = f(*args, **kwargs) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._deallocate_network( [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self.network_api.deallocate_for_instance( [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 
85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] data = neutron.list_ports(**search_opts) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.list('ports', self.ports_path, retrieve_all, [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] for r in self._pagination(collection, path, **params): [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] res = self.get(path, params=params) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.retry_request("GET", action, body=body, [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] return self.do_request(method, action, body=body, [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] ret = obj(*args, **kwargs) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] self._handle_fault_response(status_code, replybody, resp) [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1480.734951] env[61356]: ERROR nova.compute.manager [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] [ 1480.767076] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0973b6a7-aab4-49b1-b30c-07ebf8a1d48b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.774504] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf9b5a0-7f8e-4ec5-9a97-caf8b333f674 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.781189] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.235s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.805950] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 361.765s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.806160] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1480.806335] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "85ad15ec-9ba0-48fe-b5dd-ae70bbca6282" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.809006] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be3dab9-dea7-4f38-a528-4bf5914feb93 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.816344] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db2dbf4-925c-45a6-be7b-7ed599934fcb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.830853] env[61356]: DEBUG nova.compute.provider_tree [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.839379] env[61356]: DEBUG nova.scheduler.client.report [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1480.855703] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.353s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.856224] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Start building networks asynchronously for instance.
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1480.859320] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.220s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.860719] env[61356]: INFO nova.compute.claims [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.863385] env[61356]: INFO nova.compute.manager [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] [instance: 85ad15ec-9ba0-48fe-b5dd-ae70bbca6282] Successfully reverted task state from None on failure for instance. [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server [None req-6db794d3-9f2b-4021-89e1-bcf6a6dd4f82 tempest-ServersTestMultiNic-1268258457 tempest-ServersTestMultiNic-1268258457-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-1ebaa196-cd3d-4562-92e1-d3ab4b066fad'] [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1480.867244] env[61356]: ERROR 
oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1480.867244] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1480.868724] env[61356]: ERROR 
oslo_messaging.rpc.server return self.greenlet.switch() [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1480.868724] 
env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1480.868724] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1480.870250] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1480.870250] env[61356]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1480.870250] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1480.870250] env[61356]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1480.870250] env[61356]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1480.870250] env[61356]: ERROR oslo_messaging.rpc.server [ 1480.887952] env[61356]: DEBUG nova.compute.utils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1480.889338] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Not allocating networking since 'none' was specified. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1480.897097] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1480.966855] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1480.991892] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1480.992188] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1480.992300] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.992492] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1480.992657] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.992810] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1480.993152] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1480.993238] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1480.993408] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 
tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1480.993573] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1480.993748] env[61356]: DEBUG nova.virt.hardware [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1480.994612] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5024bf-81c6-454e-ba75-464cbd3b99d6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.005113] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91364309-96e1-4977-bd12-9a32813d2a03 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.019785] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance VIF info [] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1481.025196] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating folder: Project (3ef1c7aa1b7b47a78cd17597fc57ca1b). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1481.025527] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b68a1abe-52a9-48fd-8b7d-50e84df06c61 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.037745] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Created folder: Project (3ef1c7aa1b7b47a78cd17597fc57ca1b) in parent group-v846926. [ 1481.037745] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating folder: Instances. Parent ref: group-v847018. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1481.038185] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea2ef12b-7251-4383-99b5-346eddc22715 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.045732] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Created folder: Instances in parent group-v847018. 
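Note: the two Folder.CreateFolder invocations just above (vm_util.py:1589) lay out the driver's standard hierarchy: a per-project folder under the OpenStack root, then an "Instances" folder inside it. A hedged oslo.vmware sketch, assuming session is a VMwareAPISession and the refs are Folder morefs; the DuplicateName fallback is an assumption about how an idempotent re-run would behave:

```python
from oslo_vmware import exceptions as vexc


def create_folder(session, parent_ref, name):
    # Mirrors the Folder.CreateFolder calls logged above; returns the
    # new folder's managed object reference.
    try:
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # Folder already exists; a real driver would look up and
        # return the existing reference instead (omitted here).
        return None


def ensure_instance_folders(session, root_ref, project_id):
    project_folder = create_folder(session, root_ref,
                                   'Project (%s)' % project_id)
    return create_folder(session, project_folder, 'Instances')
```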
[ 1481.045970] env[61356]: DEBUG oslo.service.loopingcall [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1481.046175] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1481.046474] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5d37c33-2d21-4705-b946-028c8235afa7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.066026] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1481.066026] env[61356]: value = "task-4302622" [ 1481.066026] env[61356]: _type = "Task" [ 1481.066026] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.073367] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302622, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.150792] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec5029c-462b-4d96-bacb-d1a3b7c8034e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.159986] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351b2503-684b-45de-aaa6-f123c2fe21ca {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.191791] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63411132-cf8d-47e9-87ce-977936320116 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.200025] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fa7775-9e29-4a88-9007-07aff29d4b25 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.212565] env[61356]: DEBUG nova.compute.provider_tree [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1481.221872] env[61356]: DEBUG nova.scheduler.client.report [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1481.239449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.380s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.239973] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1481.282374] env[61356]: DEBUG nova.compute.utils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1481.283750] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Not allocating networking since 'none' was specified. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1481.292911] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1481.356486] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1481.381163] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1481.381412] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1481.381628] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1481.381824] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1481.381969] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1481.382135] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1481.382340] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1481.382512] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1481.382704] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 
tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1481.382865] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1481.383053] env[61356]: DEBUG nova.virt.hardware [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1481.383971] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d8d3f9-7a2c-46c6-972b-d7671657e250 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.391352] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3029a841-6f26-4db0-b40c-49c768b94568 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.405544] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance VIF info [] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1481.411016] env[61356]: DEBUG oslo.service.loopingcall [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1481.411231] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1481.411421] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd9d7b14-34e4-4855-8806-f520184260be {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.426075] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1481.426075] env[61356]: value = "task-4302623" [ 1481.426075] env[61356]: _type = "Task" [ 1481.426075] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.433134] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302623, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.576040] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302622, 'name': CreateVM_Task, 'duration_secs': 0.265895} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.576233] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1481.576628] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.576791] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.577132] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1481.577381] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-060c5bcb-8d17-4ad1-ba4e-599bd4cdf776 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.581640] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1481.581640] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]522aa3b2-b9ea-225c-0c51-9c77877f49ec" [ 1481.581640] env[61356]: _type = "Task" [ 1481.581640] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.589164] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]522aa3b2-b9ea-225c-0c51-9c77877f49ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.936482] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302623, 'name': CreateVM_Task, 'duration_secs': 0.258983} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.936644] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1481.936975] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.091670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.091954] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1482.092159] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.092369] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.092691] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1482.092938] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-363850dc-7950-43f5-98fc-76ff0bee6181 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.097239] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1482.097239] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c0a3a9-892d-2b8d-f486-3fda9c4e1c13" [ 1482.097239] env[61356]: _type = "Task" [ 1482.097239] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.104126] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c0a3a9-892d-2b8d-f486-3fda9c4e1c13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.607680] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.607931] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1482.608166] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.432028] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.758502] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.698153] env[61356]: WARNING oslo_vmware.rw_handles [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles 
version, status, reason = self._read_status() [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1525.698153] env[61356]: ERROR oslo_vmware.rw_handles [ 1525.698855] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1525.700855] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1525.701146] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Copying Virtual Disk [datastore2] vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/f45524ee-57a3-45d6-8b6f-fb0efc2f655e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1525.701426] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbd828db-617e-4d3a-bbf1-686e583c4d6a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.710700] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Waiting for the task: (returnval){ [ 1525.710700] env[61356]: value = "task-4302624" [ 1525.710700] env[61356]: _type = "Task" [ 1525.710700] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.718371] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Task: {'id': task-4302624, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.757954] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.769689] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.769998] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.770114] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.770285] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1525.771593] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e301b8b-305c-46d9-8bb3-8c7bbbc393f0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.779493] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11a52fb-2667-405f-91d3-85f55c24eebd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.793023] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cf4679-792d-4969-ba1d-c1be967d5913 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.799183] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6518b514-f52a-4389-acb7-164472d8d297 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.828369] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180690MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1525.828523] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.829979] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.907933] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.908206] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.908402] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.908545] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.908668] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.908788] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.908904] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.909030] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.909151] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.909264] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1525.921497] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.932373] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.949104] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.960104] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance aa69cd0e-f97a-4c63-8887-9998847329ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.971179] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.971537] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1525.971821] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1526.171359] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e444a994-1eff-43ad-b013-b82558b2b2ed {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.179093] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db35f7a6-0378-4093-be3d-a45f9a9b6e06 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.208446] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd1f034-034d-4330-b873-5765058a92f6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.216819] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5461da56-34eb-4434-bdad-5cc3cf2ecc7c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.223192] env[61356]: DEBUG oslo_vmware.exceptions [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1526.223767] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.224390] env[61356]: ERROR nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1526.224390] env[61356]: Faults: ['InvalidArgument'] [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Traceback (most recent call last): [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] yield resources [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self.driver.spawn(context, instance, image_meta, [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self._fetch_image_if_missing(context, vi) [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] image_cache(vi, tmp_image_ds_loc) [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] vm_util.copy_virtual_disk( [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] session._wait_for_task(vmdk_copy_task) [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] return self.wait_for_task(task_ref) [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] return evt.wait() [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] result = hub.switch() [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] return self.greenlet.switch() [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self.f(*self.args, **self.kw) [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] raise exceptions.translate_fault(task_info.error) [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Faults: ['InvalidArgument'] [ 1526.224390] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] [ 1526.225333] env[61356]: INFO nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Terminating instance [ 1526.234124] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.234346] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.234742] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] 
Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.236192] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1526.236397] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1526.236618] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0be6b3f6-b121-4c32-964e-e2c13b2fd8f3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.238659] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cbe377-6fe0-4195-b230-3f81f3a7a21a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.243448] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1526.248645] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1526.249718] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13514a12-4c52-42eb-9e7e-3eb1dc3b50d6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.251215] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.251389] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1526.252360] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a83a7744-325a-48a0-9a38-fb8a2481dcd5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.257081] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1526.257228] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.429s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.258697] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Waiting for the task: (returnval){ [ 1526.258697] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52258339-9ce1-1d61-4ad1-a660816ed758" [ 1526.258697] env[61356]: _type = "Task" [ 1526.258697] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.266858] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52258339-9ce1-1d61-4ad1-a660816ed758, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.320684] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1526.321012] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1526.321116] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Deleting the datastore file [datastore2] d30d291d-6cfc-492e-bbbe-1ff021d4ee75 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.321386] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9bb12591-482f-4f44-8781-f984b03fc81d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.328608] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Waiting for the task: (returnval){ [ 1526.328608] env[61356]: value = "task-4302626" [ 1526.328608] env[61356]: _type = "Task" [ 1526.328608] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.337526] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Task: {'id': task-4302626, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.769240] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1526.769498] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Creating directory with path [datastore2] vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.769717] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a91dfedf-23ae-40f7-bffc-68239ddc334e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.780853] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Created directory with path [datastore2] vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.781045] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Fetch image to [datastore2] vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1526.781219] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1526.781907] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5174ced-b374-4303-833e-b59fe663de24 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.788470] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bcf379-ca4d-4283-96f3-a2ce6d995060 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.797016] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acec465a-d92e-490c-a01d-c6384f47a7d3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.827480] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-569ee1ee-3af4-4aa2-abde-65e7b2bb3777 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.838319] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9b592f27-4249-4379-a0e5-ee84dc7e423b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.839929] env[61356]: DEBUG oslo_vmware.api [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Task: {'id': task-4302626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077843} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.840176] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1526.840354] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1526.840524] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1526.840695] env[61356]: INFO nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1526.842848] env[61356]: DEBUG nova.compute.claims [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1526.843105] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.843331] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.863302] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1526.921727] env[61356]: DEBUG oslo_vmware.rw_handles [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1526.980693] env[61356]: DEBUG oslo_vmware.rw_handles [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1526.980693] env[61356]: DEBUG oslo_vmware.rw_handles [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1527.156268] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65f56bc-18e4-4a5f-979d-5ddd77c68389 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.163973] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3ec681-3183-4f23-a574-5281b1ffbd03 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.193393] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691cce5e-257e-4959-bc7d-312d2865fcdd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.200914] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e52888-a46f-471e-ae61-8304676d74d1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.214686] env[61356]: DEBUG nova.compute.provider_tree [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.223389] env[61356]: DEBUG nova.scheduler.client.report [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.237252] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.394s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.237767] env[61356]: ERROR nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1527.237767] env[61356]: Faults: ['InvalidArgument'] [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Traceback (most recent call last): [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self.driver.spawn(context, instance, image_meta, [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self._fetch_image_if_missing(context, vi) [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] image_cache(vi, tmp_image_ds_loc) [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] vm_util.copy_virtual_disk( [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] session._wait_for_task(vmdk_copy_task) [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] return self.wait_for_task(task_ref) [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] return evt.wait() [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] result = hub.switch() [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] return self.greenlet.switch() [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] self.f(*self.args, **self.kw) [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: 
d30d291d-6cfc-492e-bbbe-1ff021d4ee75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] raise exceptions.translate_fault(task_info.error) [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Faults: ['InvalidArgument'] [ 1527.237767] env[61356]: ERROR nova.compute.manager [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] [ 1527.238602] env[61356]: DEBUG nova.compute.utils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1527.243722] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Build of instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 was re-scheduled: A specified parameter was not correct: fileType [ 1527.243722] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1527.244115] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1527.244292] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1527.244446] env[61356]: DEBUG nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1527.244604] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1527.556122] env[61356]: DEBUG nova.network.neutron [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.567495] env[61356]: INFO nova.compute.manager [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Took 0.32 seconds to deallocate network for instance. [ 1527.896050] env[61356]: INFO nova.scheduler.client.report [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Deleted allocations for instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 [ 1527.919100] env[61356]: DEBUG oslo_concurrency.lockutils [None req-ce2e26b1-b866-4240-a259-23aba0898d10 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 581.610s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.920675] env[61356]: DEBUG oslo_concurrency.lockutils [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 385.709s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.921034] env[61356]: DEBUG oslo_concurrency.lockutils [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Acquiring lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.922038] env[61356]: DEBUG oslo_concurrency.lockutils [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock 
"d30d291d-6cfc-492e-bbbe-1ff021d4ee75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.922038] env[61356]: DEBUG oslo_concurrency.lockutils [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.923757] env[61356]: INFO nova.compute.manager [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Terminating instance [ 1527.925691] env[61356]: DEBUG nova.compute.manager [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1527.925885] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1527.926921] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21a47608-421c-462b-ab23-dffc73b495ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.930987] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1527.938566] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3d548c-3b9a-47f4-bddf-56976e1f89a9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.970400] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d30d291d-6cfc-492e-bbbe-1ff021d4ee75 could not be found. 
[ 1527.970603] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1527.970777] env[61356]: INFO nova.compute.manager [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1527.971039] env[61356]: DEBUG oslo.service.loopingcall [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.973366] env[61356]: DEBUG nova.compute.manager [-] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1527.973466] env[61356]: DEBUG nova.network.neutron [-] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1527.986902] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.987151] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.988636] env[61356]: INFO nova.compute.claims [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1528.000573] env[61356]: DEBUG nova.network.neutron [-] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.022446] env[61356]: INFO nova.compute.manager [-] [instance: d30d291d-6cfc-492e-bbbe-1ff021d4ee75] Took 0.05 seconds to deallocate network for instance. 
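Annotation: the "Inventory has not changed for provider ..." records repeat the host's full placement inventory. The usable capacity placement derives from each resource class is (total - reserved) * allocation_ratio, so this node advertises 48 * 4.0 = 192 schedulable VCPUs, (196590 - 512) * 1.0 = 196078 MB of RAM, and 210 GB of disk; that is the pool the "Claim successful on node domain-c8..." entry draws from. A small illustrative calculation using a hypothetical helper (not Nova code), with the inventory values copied from this log:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def schedulable(inv):
        # Placement-style capacity: (total - reserved) * allocation_ratio.
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc, inv in inventory.items():
        print(rc, schedulable(inv))
    # VCPU 192
    # MEMORY_MB 196078
    # DISK_GB 210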
[ 1528.133674] env[61356]: DEBUG oslo_concurrency.lockutils [None req-837f6977-4042-407c-83bc-8c4607066695 tempest-ServersAdminNegativeTestJSON-1298524168 tempest-ServersAdminNegativeTestJSON-1298524168-project-member] Lock "d30d291d-6cfc-492e-bbbe-1ff021d4ee75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.232115] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff44f564-4f3a-4f60-a0b9-fd858a2218a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.239440] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807897ff-1ada-4c45-b3f6-64225dd1fc4d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.268209] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2830316d-01f8-4cae-9cde-5c2d4b475937 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.275986] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cc2c14-1276-4dcc-942c-e98032021aa7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.290118] env[61356]: DEBUG nova.compute.provider_tree [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.299011] env[61356]: DEBUG nova.scheduler.client.report [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1528.313456] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.313923] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1528.351211] env[61356]: DEBUG nova.compute.utils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.352916] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1528.352916] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1528.361771] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1528.419363] env[61356]: DEBUG nova.policy [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'de9b77585e1d40b188773f5e6eedaf19', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c1e6ac857b884af5b100b0c012967d07', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1528.447021] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1528.473382] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1528.473629] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1528.473786] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1528.473967] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1528.474131] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1528.474280] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1528.474484] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1528.474644] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1528.474806] env[61356]: DEBUG 
nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1528.475032] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1528.475216] env[61356]: DEBUG nova.virt.hardware [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1528.476058] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5c278c-5aab-4e1f-9c34-86bf6b4f2d7f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.483652] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a132c-2236-4b3d-bb3d-fb87cf2797c0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.747175] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Successfully created port: 06596486-c37b-48bb-ad4f-2c3be60adea5 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1529.259769] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.579324] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Successfully updated port: 06596486-c37b-48bb-ad4f-2c3be60adea5 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1529.594388] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "refresh_cache-9958f378-1cab-49bf-8375-5e95c16401e4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.594492] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquired lock "refresh_cache-9958f378-1cab-49bf-8375-5e95c16401e4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.594633] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 
tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1529.608884] env[61356]: DEBUG nova.compute.manager [req-f2238b3f-5498-4c12-910d-3694fe836954 req-c567ae20-db1b-49dd-8596-1839118d3c1c service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Received event network-vif-plugged-06596486-c37b-48bb-ad4f-2c3be60adea5 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1529.609099] env[61356]: DEBUG oslo_concurrency.lockutils [req-f2238b3f-5498-4c12-910d-3694fe836954 req-c567ae20-db1b-49dd-8596-1839118d3c1c service nova] Acquiring lock "9958f378-1cab-49bf-8375-5e95c16401e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.609297] env[61356]: DEBUG oslo_concurrency.lockutils [req-f2238b3f-5498-4c12-910d-3694fe836954 req-c567ae20-db1b-49dd-8596-1839118d3c1c service nova] Lock "9958f378-1cab-49bf-8375-5e95c16401e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.609458] env[61356]: DEBUG oslo_concurrency.lockutils [req-f2238b3f-5498-4c12-910d-3694fe836954 req-c567ae20-db1b-49dd-8596-1839118d3c1c service nova] Lock "9958f378-1cab-49bf-8375-5e95c16401e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.609618] env[61356]: DEBUG nova.compute.manager [req-f2238b3f-5498-4c12-910d-3694fe836954 req-c567ae20-db1b-49dd-8596-1839118d3c1c service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] No waiting events found dispatching network-vif-plugged-06596486-c37b-48bb-ad4f-2c3be60adea5 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1529.611016] env[61356]: WARNING nova.compute.manager [req-f2238b3f-5498-4c12-910d-3694fe836954 req-c567ae20-db1b-49dd-8596-1839118d3c1c service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Received unexpected event network-vif-plugged-06596486-c37b-48bb-ad4f-2c3be60adea5 for instance with vm_state building and task_state spawning. [ 1529.645110] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1529.757878] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.758110] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.809773] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Updating instance_info_cache with network_info: [{"id": "06596486-c37b-48bb-ad4f-2c3be60adea5", "address": "fa:16:3e:d4:b9:f5", "network": {"id": "4bdfeb12-e65d-49a4-9914-5a96af15f89f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-60678297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1e6ac857b884af5b100b0c012967d07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06596486-c3", "ovs_interfaceid": "06596486-c37b-48bb-ad4f-2c3be60adea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.820993] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Releasing lock "refresh_cache-9958f378-1cab-49bf-8375-5e95c16401e4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.821280] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance network_info: |[{"id": "06596486-c37b-48bb-ad4f-2c3be60adea5", "address": "fa:16:3e:d4:b9:f5", "network": {"id": "4bdfeb12-e65d-49a4-9914-5a96af15f89f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-60678297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1e6ac857b884af5b100b0c012967d07", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06596486-c3", "ovs_interfaceid": "06596486-c37b-48bb-ad4f-2c3be60adea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1529.821666] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:b9:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06596486-c37b-48bb-ad4f-2c3be60adea5', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.828994] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Creating folder: Project (c1e6ac857b884af5b100b0c012967d07). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1529.829488] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8948111-27fb-42f8-bd20-5ebd968d5f87 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.840340] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Created folder: Project (c1e6ac857b884af5b100b0c012967d07) in parent group-v846926. [ 1529.840516] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Creating folder: Instances. Parent ref: group-v847022. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1529.840721] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4109f8db-6285-4053-9e05-ba4ecce7b939 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.848875] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Created folder: Instances in parent group-v847022. [ 1529.849099] env[61356]: DEBUG oslo.service.loopingcall [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.849272] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1529.849452] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1193b18c-3c49-4039-84f2-8a4996761545 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.866876] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.866876] env[61356]: value = "task-4302629" [ 1529.866876] env[61356]: _type = "Task" [ 1529.866876] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.873775] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302629, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.377112] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302629, 'name': CreateVM_Task, 'duration_secs': 0.309097} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.377414] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1530.378051] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.378222] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.378533] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.378781] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-141dc954-5889-4363-b649-7f437d51f580 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.383297] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Waiting for the task: (returnval){ [ 1530.383297] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e91764-fb34-77f1-ba5d-6dabcc02a7af" [ 1530.383297] env[61356]: _type = "Task" [ 1530.383297] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.391915] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e91764-fb34-77f1-ba5d-6dabcc02a7af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.754103] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1530.893616] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.893880] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.894108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.642308] env[61356]: DEBUG nova.compute.manager [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Received event network-changed-06596486-c37b-48bb-ad4f-2c3be60adea5 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1531.642308] env[61356]: DEBUG nova.compute.manager [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Refreshing instance network info cache due to event network-changed-06596486-c37b-48bb-ad4f-2c3be60adea5. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1531.643392] env[61356]: DEBUG oslo_concurrency.lockutils [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] Acquiring lock "refresh_cache-9958f378-1cab-49bf-8375-5e95c16401e4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.643392] env[61356]: DEBUG oslo_concurrency.lockutils [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] Acquired lock "refresh_cache-9958f378-1cab-49bf-8375-5e95c16401e4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.643392] env[61356]: DEBUG nova.network.neutron [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Refreshing network info cache for port 06596486-c37b-48bb-ad4f-2c3be60adea5 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1531.757658] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1531.757839] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1531.906631] env[61356]: DEBUG nova.network.neutron [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Updated VIF entry in instance network info cache for port 06596486-c37b-48bb-ad4f-2c3be60adea5. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1531.906991] env[61356]: DEBUG nova.network.neutron [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Updating instance_info_cache with network_info: [{"id": "06596486-c37b-48bb-ad4f-2c3be60adea5", "address": "fa:16:3e:d4:b9:f5", "network": {"id": "4bdfeb12-e65d-49a4-9914-5a96af15f89f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-60678297-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c1e6ac857b884af5b100b0c012967d07", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06596486-c3", "ovs_interfaceid": "06596486-c37b-48bb-ad4f-2c3be60adea5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.916652] env[61356]: DEBUG oslo_concurrency.lockutils [req-4a0e1561-9f81-447c-a19d-4d38a45bf22b req-dc77548d-417b-4f32-9473-2e70b4c10d98 service nova] Releasing lock "refresh_cache-9958f378-1cab-49bf-8375-5e95c16401e4" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.760897] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.759237] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.759522] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1534.759557] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1534.785806] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.785974] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786122] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786257] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786383] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786504] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786624] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786743] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786861] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.786978] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1534.787115] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1543.999406] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.999744] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.107181] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.107540] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.993582] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2f206047-c256-41e6-8ce5-f1e490e2b6a5 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "344f33b2-9314-478a-b714-003090c9c342" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.993906] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2f206047-c256-41e6-8ce5-f1e490e2b6a5 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "344f33b2-9314-478a-b714-003090c9c342" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.010624] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "9958f378-1cab-49bf-8375-5e95c16401e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.985707] env[61356]: WARNING oslo_vmware.rw_handles [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 
tempest-ServerRescueNegativeTestJSON-399370680-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1572.985707] env[61356]: ERROR oslo_vmware.rw_handles [ 1572.986221] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1572.988407] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1572.988678] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Copying Virtual Disk [datastore2] vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/4392b0f6-0ba8-4c90-84e6-32f7e0b6b0ad/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1572.988998] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6a3547a-3b0b-40d0-966b-ce14e72c176f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.997510] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Waiting for the task: (returnval){ [ 1572.997510] env[61356]: value = "task-4302630" [ 1572.997510] env[61356]: _type = "Task" [ 1572.997510] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.005945] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Task: {'id': task-4302630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.508047] env[61356]: DEBUG oslo_vmware.exceptions [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1573.508047] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.508421] env[61356]: ERROR nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1573.508421] env[61356]: Faults: ['InvalidArgument'] [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Traceback (most recent call last): [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] yield resources [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self.driver.spawn(context, instance, image_meta, [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self._fetch_image_if_missing(context, vi) [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] image_cache(vi, tmp_image_ds_loc) [ 1573.508421] env[61356]: ERROR 
nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] vm_util.copy_virtual_disk( [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] session._wait_for_task(vmdk_copy_task) [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] return self.wait_for_task(task_ref) [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] return evt.wait() [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] result = hub.switch() [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] return self.greenlet.switch() [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self.f(*self.args, **self.kw) [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] raise exceptions.translate_fault(task_info.error) [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Faults: ['InvalidArgument'] [ 1573.508421] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] [ 1573.509335] env[61356]: INFO nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Terminating instance [ 1573.510271] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.510547] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.510784] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef3a53c3-938d-42da-a9d5-63b4ad229d05 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.513042] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1573.513238] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1573.513968] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bc1fd3-570f-4c0a-b49f-73460991499b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.520918] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1573.522125] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98231797-c063-4382-9f21-f04429eeb371 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.523352] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.523527] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1573.524187] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ffbccb9-413b-49e3-9224-4c51dbc8a59c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.529161] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Waiting for the task: (returnval){ [ 1573.529161] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]528d4162-f2a4-6475-e3da-280e26abb7f0" [ 1573.529161] env[61356]: _type = "Task" [ 1573.529161] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.541393] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]528d4162-f2a4-6475-e3da-280e26abb7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.590685] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1573.590909] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1573.591156] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Deleting the datastore file [datastore2] c22e625c-3fd2-4280-9cc7-bb7e08d700d5 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1573.591382] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-500541c0-eb25-4042-a9fc-d5b268280291 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.597894] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Waiting for the task: (returnval){ [ 1573.597894] env[61356]: value = "task-4302632" [ 1573.597894] env[61356]: _type = "Task" [ 1573.597894] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.605153] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Task: {'id': task-4302632, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.038787] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1574.039064] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Creating directory with path [datastore2] vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1574.039296] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92a1730a-d28d-4324-9779-f3e49fea6982 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.050400] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Created directory with path [datastore2] vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1574.050588] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Fetch image to [datastore2] vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1574.050756] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1574.051461] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a5ae50-38f1-4292-bf1c-a9431e911d8e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.058446] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16936fde-c354-480c-a9de-d63017a9e707 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.067281] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac1304a-114d-424d-b005-d78eaa54c65f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.098421] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-87109e95-372f-45b4-ae2f-e56d16ef26ac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.108973] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae87c67a-04c7-4349-9373-97909bbdf939 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.110617] env[61356]: DEBUG oslo_vmware.api [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Task: {'id': task-4302632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066247} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.110843] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1574.111029] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1574.111206] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1574.111374] env[61356]: INFO nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Took 0.60 seconds to destroy the instance on the hypervisor. 
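The task-4302630 and task-4302632 records above all follow one pattern: a vCenter task is created, _poll_task reports progress until the task reaches a terminal state, a successful result carries duration_secs, and a failed one is translated into a fault such as the VimFaultException seen earlier. Below is a minimal sketch of that poll-until-terminal loop; get_task_info, TaskFaultError, and poll_interval are illustrative names assumed for this sketch, not the real oslo.vmware API.

```python
import time


class TaskFaultError(Exception):
    """Stands in for the translated fault raised when a task fails."""


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning a dict with
    'state' ('running' | 'success' | 'error') plus optional
    'progress' and 'error' fields, mirroring the task info polled above.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()  # one property-collector round trip
        state = info["state"]
        if state == "running":
            # Corresponds to the "... progress is 0%." records.
            print("progress is %s%%" % info.get("progress", 0))
        elif state == "success":
            # Corresponds to "completed successfully" with duration_secs.
            info["duration_secs"] = time.monotonic() - start
            return info
        elif state == "error":
            # The real code first tries to match a specific fault class
            # ("Fault InvalidArgument not matched.") before raising.
            raise TaskFaultError(info["error"])
        time.sleep(poll_interval)
```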
[ 1574.113435] env[61356]: DEBUG nova.compute.claims [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1574.113629] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.113831] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.130855] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1574.181743] env[61356]: DEBUG oslo_vmware.rw_handles [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1574.239832] env[61356]: DEBUG oslo_vmware.rw_handles [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1574.240116] env[61356]: DEBUG oslo_vmware.rw_handles [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1574.407163] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1181c577-e283-477b-8897-76dfaab9df13 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.414868] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9111fefe-9531-4938-b674-cbabf283fe20 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.443871] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63b285d-969b-42eb-ad12-5a7c9bb61e60 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.450991] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf10810-237f-45e1-af76-d0a6fe19cda2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.464249] env[61356]: DEBUG nova.compute.provider_tree [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.472919] env[61356]: DEBUG nova.scheduler.client.report [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.487252] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.373s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.487769] env[61356]: ERROR nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1574.487769] env[61356]: Faults: ['InvalidArgument'] [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Traceback (most recent call last): [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self.driver.spawn(context, instance, image_meta, [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self._fetch_image_if_missing(context, vi) [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] image_cache(vi, tmp_image_ds_loc) [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] vm_util.copy_virtual_disk( [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] session._wait_for_task(vmdk_copy_task) [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] return self.wait_for_task(task_ref) [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] return evt.wait() [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] result = hub.switch() [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] return self.greenlet.switch() [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] self.f(*self.args, **self.kw) [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: 
c22e625c-3fd2-4280-9cc7-bb7e08d700d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] raise exceptions.translate_fault(task_info.error) [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Faults: ['InvalidArgument'] [ 1574.487769] env[61356]: ERROR nova.compute.manager [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] [ 1574.488653] env[61356]: DEBUG nova.compute.utils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1574.489920] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Build of instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 was re-scheduled: A specified parameter was not correct: fileType [ 1574.489920] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1574.490396] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1574.532613] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1574.532613] env[61356]: DEBUG nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1574.532613] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1574.800073] env[61356]: DEBUG nova.network.neutron [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.811430] env[61356]: INFO nova.compute.manager [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Took 0.32 seconds to deallocate network for instance. [ 1574.899374] env[61356]: INFO nova.scheduler.client.report [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Deleted allocations for instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 [ 1574.918895] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e6dbe280-5f1f-42a5-a273-938ff8c2a3c4 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.523s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.920148] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 384.695s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.920446] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Acquiring lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.920665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.920832] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.925709] env[61356]: INFO nova.compute.manager [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Terminating instance [ 1574.927206] env[61356]: DEBUG nova.compute.manager [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1574.927663] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1574.927730] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ede79d74-f6b7-4d0a-bd9c-d8cbb3cb65cf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.937609] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51535ae5-e880-4fd7-b233-1eb0d8993665 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.958490] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1574.970090] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c22e625c-3fd2-4280-9cc7-bb7e08d700d5 could not be found. 
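The WARNING just above ("Instance does not exist on backend") is benign: the earlier re-scheduled build already destroyed this VM, so the second terminate finds nothing on the hypervisor. The destroy path tolerates this by treating InstanceNotFound as "already destroyed" and carrying on with network cleanup, which is why the log continues normally. A rough sketch of that pattern follows; find_vm and unregister_vm are hypothetical callables standing in for the driver's lookup and unregister steps, and this is not Nova's actual code.

```python
class InstanceNotFound(Exception):
    """Raised when the backend has no VM for the given instance."""


def destroy_instance(find_vm, unregister_vm, instance_uuid, log):
    """Destroy an instance, tolerating a VM that is already gone."""
    try:
        vm_ref = find_vm(instance_uuid)  # e.g. a FindAllByUuid-style lookup
        unregister_vm(vm_ref)
    except InstanceNotFound:
        # The VM vanished earlier (here: the failed spawn was already
        # cleaned up), so there is nothing left to remove on the backend.
        log.warning("Instance does not exist on backend: %s", instance_uuid)
    # Either way the instance is now absent from the hypervisor, and the
    # caller proceeds to deallocate networks and release the claim.
```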
[ 1574.970318] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1574.970497] env[61356]: INFO nova.compute.manager [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1574.970741] env[61356]: DEBUG oslo.service.loopingcall [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1574.971185] env[61356]: DEBUG nova.compute.manager [-] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1574.971341] env[61356]: DEBUG nova.network.neutron [-] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1575.008499] env[61356]: DEBUG nova.network.neutron [-] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.016948] env[61356]: INFO nova.compute.manager [-] [instance: c22e625c-3fd2-4280-9cc7-bb7e08d700d5] Took 0.05 seconds to deallocate network for instance. 
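The oslo.service loopingcall record above waits on _deallocate_network_with_retries: network deallocation is wrapped in a retry loop rather than failing the terminate on the first Neutron error. A simplified sketch of such a wrapper is shown below, with illustrative max_retries and delay values; the real retry policy lives in Nova and oslo.service and may differ.

```python
import time


def deallocate_network_with_retries(deallocate, context, instance,
                                    max_retries=3, delay=2.0):
    """Retry network deallocation a few times before giving up.

    deallocate is a callable such as a neutron deallocate_for_instance
    wrapper; max_retries and delay are assumed values for this sketch.
    """
    for attempt in range(1, max_retries + 1):
        try:
            deallocate(context, instance)  # one deallocate_for_instance() call
            return
        except Exception:
            if attempt == max_retries:
                raise  # out of retries: surface the failure to the caller
            time.sleep(delay)  # back off, then try again
```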
[ 1575.026188] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.026434] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.027894] env[61356]: INFO nova.compute.claims [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1575.113318] env[61356]: DEBUG oslo_concurrency.lockutils [None req-07bb6bec-c166-4cad-a99f-2341c47477c0 tempest-ServerRescueNegativeTestJSON-399370680 tempest-ServerRescueNegativeTestJSON-399370680-project-member] Lock "c22e625c-3fd2-4280-9cc7-bb7e08d700d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.269187] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20771bb-cc57-44c9-a0dc-40618c25e911 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.276759] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30249bd8-8fa8-4657-852b-864d76afdee5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.307210] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e950f32a-9e71-4852-a5bb-26d42b5d2584 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.313856] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e753a0a-e488-49e2-a9ee-ceb62769501c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.327033] env[61356]: DEBUG nova.compute.provider_tree [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1575.336062] env[61356]: DEBUG nova.scheduler.client.report [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1575.350076] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.350244] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1575.383963] env[61356]: DEBUG nova.compute.utils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.385435] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1575.385612] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1575.397953] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Start building block device mappings for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1575.450729] env[61356]: DEBUG nova.policy [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '877bb82afe8d42ae8f54d1e963c0244b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a8faf03438643ad85e91c93c88ce86d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1575.465835] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1575.491062] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1575.491309] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1575.491470] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1575.491652] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1575.491799] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1575.491947] env[61356]: DEBUG nova.virt.hardware [None 
req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1575.492167] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1575.492329] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1575.492559] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1575.492749] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1575.492926] env[61356]: DEBUG nova.virt.hardware [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1575.493816] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649867d1-14a4-4464-8932-2e951bcfb443 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.501249] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80578a30-1d54-40ad-95f9-a58d696c5b04 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.757700] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Successfully created port: 29581d83-5c64-4bd0-a425-07b1dafa6d61 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.353127] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Successfully updated port: 29581d83-5c64-4bd0-a425-07b1dafa6d61 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1576.369824] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 
tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.369984] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.370151] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1576.415187] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1576.590727] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Updating instance_info_cache with network_info: [{"id": "29581d83-5c64-4bd0-a425-07b1dafa6d61", "address": "fa:16:3e:a1:dc:5b", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29581d83-5c", "ovs_interfaceid": "29581d83-5c64-4bd0-a425-07b1dafa6d61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.602247] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.602567] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] 
Instance network_info: |[{"id": "29581d83-5c64-4bd0-a425-07b1dafa6d61", "address": "fa:16:3e:a1:dc:5b", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29581d83-5c", "ovs_interfaceid": "29581d83-5c64-4bd0-a425-07b1dafa6d61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1576.602989] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:dc:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29581d83-5c64-4bd0-a425-07b1dafa6d61', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1576.610705] env[61356]: DEBUG oslo.service.loopingcall [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1576.611582] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1576.611866] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.612488] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-395fcee1-553c-4eb9-be02-3e1c7793946d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.632275] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1576.632275] env[61356]: value = "task-4302633" [ 1576.632275] env[61356]: _type = "Task" [ 1576.632275] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.639947] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302633, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.862524] env[61356]: DEBUG nova.compute.manager [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Received event network-vif-plugged-29581d83-5c64-4bd0-a425-07b1dafa6d61 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1576.862836] env[61356]: DEBUG oslo_concurrency.lockutils [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] Acquiring lock "d6141da0-7822-45dc-902a-c9fc3ee73fef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.863068] env[61356]: DEBUG oslo_concurrency.lockutils [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.863269] env[61356]: DEBUG oslo_concurrency.lockutils [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.863460] env[61356]: DEBUG nova.compute.manager [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] No waiting events found dispatching network-vif-plugged-29581d83-5c64-4bd0-a425-07b1dafa6d61 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1576.863647] env[61356]: WARNING nova.compute.manager [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Received unexpected event network-vif-plugged-29581d83-5c64-4bd0-a425-07b1dafa6d61 for instance with vm_state building and task_state deleting. [ 1576.863816] env[61356]: DEBUG nova.compute.manager [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Received event network-changed-29581d83-5c64-4bd0-a425-07b1dafa6d61 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1576.863968] env[61356]: DEBUG nova.compute.manager [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Refreshing instance network info cache due to event network-changed-29581d83-5c64-4bd0-a425-07b1dafa6d61. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1576.864171] env[61356]: DEBUG oslo_concurrency.lockutils [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] Acquiring lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.864310] env[61356]: DEBUG oslo_concurrency.lockutils [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] Acquired lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.864471] env[61356]: DEBUG nova.network.neutron [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Refreshing network info cache for port 29581d83-5c64-4bd0-a425-07b1dafa6d61 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1577.133394] env[61356]: DEBUG nova.network.neutron [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Updated VIF entry in instance network info cache for port 29581d83-5c64-4bd0-a425-07b1dafa6d61. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1577.133786] env[61356]: DEBUG nova.network.neutron [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Updating instance_info_cache with network_info: [{"id": "29581d83-5c64-4bd0-a425-07b1dafa6d61", "address": "fa:16:3e:a1:dc:5b", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29581d83-5c", "ovs_interfaceid": "29581d83-5c64-4bd0-a425-07b1dafa6d61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.145359] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302633, 'name': CreateVM_Task, 'duration_secs': 0.280459} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.146084] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1577.146560] env[61356]: DEBUG oslo_concurrency.lockutils [req-100bf956-a97b-4d26-b983-e6d740a7c4c7 req-3691e5f6-d9c3-4f12-90f1-2d5336c06376 service nova] Releasing lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.147343] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.147526] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.147889] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.148393] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d82ce071-4c7d-4f3d-bc0c-1d1be9c1bd1e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.152752] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 1577.152752] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521c8ebb-517e-9c61-8f67-fa7dbb64a71b" [ 1577.152752] env[61356]: _type = "Task" [ 1577.152752] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.160907] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521c8ebb-517e-9c61-8f67-fa7dbb64a71b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.663569] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.663816] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.664050] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.758620] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1585.771046] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.771241] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.771408] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.771564] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1585.772713] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aac68b-8f53-41ef-8a56-4bf187438a36 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.782029] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f76b0d8-4337-4800-8d93-2cae13171d22 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1585.796454] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f3cd60-1bbd-45ea-a60e-626b85e2003c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.802478] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6d613e-f0ab-4569-8886-e794f6e58caa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.832469] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180686MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1585.832639] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.832864] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.905846] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3bc9faac-35bb-4303-937f-3682771e9258 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906040] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906191] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906344] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906472] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906592] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906708] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906825] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.906969] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.907124] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1585.918235] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1585.928564] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance aa69cd0e-f97a-4c63-8887-9998847329ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1585.937711] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1585.947471] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1585.956563] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1585.965648] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 344f33b2-9314-478a-b714-003090c9c342 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1585.965865] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1585.966015] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1586.147798] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76178751-e2c0-4e23-9f8c-7f64be20dd46 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.155063] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403ac29d-5b5a-4d52-a8b0-8b8e7723e3ae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.186163] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd2c452-e5b8-4f68-a023-da6e6fdf2730 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.193353] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac584ce9-4591-421c-87a0-a78f5216d3a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.206187] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: 
cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.215286] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1586.231275] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1586.231473] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.399s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.230625] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.688935] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.689556] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.757945] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.754573] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.758099] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.759059] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.759059] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.759059] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1592.758570] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.370606] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c9b44311-913a-4439-8fd2-4bcc9a42b118 tempest-ServersTestFqdnHostnames-981284514 tempest-ServersTestFqdnHostnames-981284514-project-member] Acquiring lock "ba07e43d-c654-4202-a392-743006b3e6fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.370913] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c9b44311-913a-4439-8fd2-4bcc9a42b118 tempest-ServersTestFqdnHostnames-981284514 tempest-ServersTestFqdnHostnames-981284514-project-member] Lock "ba07e43d-c654-4202-a392-743006b3e6fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.759019] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.759468] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1596.759468] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1596.779954] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780128] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780252] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780378] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780501] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780645] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780777] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.780898] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.781024] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.781144] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1596.781259] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1600.776384] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1622.014636] env[61356]: WARNING oslo_vmware.rw_handles [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1622.014636] env[61356]: ERROR oslo_vmware.rw_handles [ 1622.015464] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1622.017010] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1622.017258] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Copying Virtual Disk [datastore2] vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/2f80fa83-5271-4399-9606-743405ce857e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1622.017532] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-796743d3-2205-45d4-9fc8-a8e539733fca 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.024545] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Waiting for the task: (returnval){ [ 1622.024545] env[61356]: value = "task-4302634" [ 1622.024545] env[61356]: _type = "Task" [ 1622.024545] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.031996] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Task: {'id': task-4302634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.535032] env[61356]: DEBUG oslo_vmware.exceptions [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1622.535387] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1622.535965] env[61356]: ERROR nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.535965] env[61356]: Faults: ['InvalidArgument'] [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Traceback (most recent call last): [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] yield resources [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self.driver.spawn(context, instance, image_meta, [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 
3bc9faac-35bb-4303-937f-3682771e9258] self._fetch_image_if_missing(context, vi) [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] image_cache(vi, tmp_image_ds_loc) [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] vm_util.copy_virtual_disk( [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] session._wait_for_task(vmdk_copy_task) [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] return self.wait_for_task(task_ref) [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] return evt.wait() [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] result = hub.switch() [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] return self.greenlet.switch() [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self.f(*self.args, **self.kw) [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] raise exceptions.translate_fault(task_info.error) [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Faults: ['InvalidArgument'] [ 1622.535965] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] [ 1622.536967] env[61356]: INFO nova.compute.manager 
[None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Terminating instance [ 1622.537842] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.538059] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.538298] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ceb5b839-1d3e-4863-92f3-06bd3457dcf6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.540416] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1622.540605] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1622.541345] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4d7218-c67a-497b-bc95-2bd8d5b70a0c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.548191] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1622.549095] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95fa524d-0783-4435-838f-4e3c8c0483e4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.550408] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.550580] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1622.551245] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-260e6389-2239-4c6d-bea1-b178d37a12e0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.556436] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Waiting for the task: (returnval){ [ 1622.556436] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5254a47a-b583-820a-c18d-bb0ddde2cbbb" [ 1622.556436] env[61356]: _type = "Task" [ 1622.556436] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.564378] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5254a47a-b583-820a-c18d-bb0ddde2cbbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.615690] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1622.615919] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1622.616078] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Deleting the datastore file [datastore2] 3bc9faac-35bb-4303-937f-3682771e9258 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1622.616364] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16dc6490-8d70-457c-ae46-33fccd2f71ff {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.622502] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Waiting for the task: (returnval){ [ 1622.622502] env[61356]: value = "task-4302636" [ 1622.622502] env[61356]: _type = "Task" [ 1622.622502] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.629567] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Task: {'id': task-4302636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.066261] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1623.066554] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Creating directory with path [datastore2] vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1623.066714] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b2bdfcf-2c39-4735-9037-b94964543d1c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.078321] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Created directory with path [datastore2] vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1623.078508] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Fetch image to [datastore2] vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1623.078675] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1623.079378] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51cf710-a2fa-47ff-b3b6-53289374f1b4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.085532] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529cccca-3d83-4fd2-abea-4850ea75f95d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.094211] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3bdce4-2487-4f6c-b131-0aca44f5a9b8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.127191] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0ae6e5-8cb0-4e37-ad19-df0c512435e2 {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.133897] env[61356]: DEBUG oslo_vmware.api [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Task: {'id': task-4302636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076604} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.135298] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1623.135483] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1623.135652] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1623.135823] env[61356]: INFO nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1623.137525] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e7699842-e49b-4528-8b86-f406f502a89d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.139359] env[61356]: DEBUG nova.compute.claims [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1623.139535] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.139752] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.162903] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1623.212743] env[61356]: DEBUG oslo_vmware.rw_handles [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1623.272026] env[61356]: DEBUG oslo_vmware.rw_handles [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1623.272026] env[61356]: DEBUG oslo_vmware.rw_handles [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1623.425256] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2668ee-5464-41ab-acf5-c5be36b85324 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.432508] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0059cbe3-c746-4311-88ee-10ce784ebec6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.460596] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f52f34d-41f3-4f95-82f1-dcc3128b1624 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.467105] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cf92a0-8d84-42d0-9417-4ee9deea810d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.479312] env[61356]: DEBUG nova.compute.provider_tree [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.487694] env[61356]: DEBUG nova.scheduler.client.report [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1623.502251] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.362s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.502466] env[61356]: ERROR nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1623.502466] env[61356]: Faults: ['InvalidArgument'] [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Traceback (most recent call last): [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1623.502466] 
env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self.driver.spawn(context, instance, image_meta, [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self._fetch_image_if_missing(context, vi) [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] image_cache(vi, tmp_image_ds_loc) [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] vm_util.copy_virtual_disk( [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] session._wait_for_task(vmdk_copy_task) [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] return self.wait_for_task(task_ref) [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] return evt.wait() [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] result = hub.switch() [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] return self.greenlet.switch() [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] self.f(*self.args, **self.kw) [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] raise exceptions.translate_fault(task_info.error) [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Faults: ['InvalidArgument'] [ 1623.502466] env[61356]: ERROR nova.compute.manager [instance: 3bc9faac-35bb-4303-937f-3682771e9258] [ 1623.503416] env[61356]: DEBUG nova.compute.utils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1623.504788] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Build of instance 3bc9faac-35bb-4303-937f-3682771e9258 was re-scheduled: A specified parameter was not correct: fileType [ 1623.504788] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1623.505194] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1623.505398] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1623.505578] env[61356]: DEBUG nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1623.505740] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1623.809244] env[61356]: DEBUG nova.network.neutron [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.821787] env[61356]: INFO nova.compute.manager [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Took 0.32 seconds to deallocate network for instance. [ 1623.921355] env[61356]: INFO nova.scheduler.client.report [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Deleted allocations for instance 3bc9faac-35bb-4303-937f-3682771e9258 [ 1623.942292] env[61356]: DEBUG oslo_concurrency.lockutils [None req-efb545c5-b127-4b69-b620-6fea54d04e7a tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "3bc9faac-35bb-4303-937f-3682771e9258" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 585.472s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.943456] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "3bc9faac-35bb-4303-937f-3682771e9258" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 389.377s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.943679] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "3bc9faac-35bb-4303-937f-3682771e9258-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.943884] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "3bc9faac-35bb-4303-937f-3682771e9258-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.944081] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "3bc9faac-35bb-4303-937f-3682771e9258-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.946070] env[61356]: INFO nova.compute.manager [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Terminating instance [ 1623.947719] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquiring lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.947875] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Acquired lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.948062] env[61356]: DEBUG nova.network.neutron [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1623.954983] env[61356]: DEBUG nova.compute.manager [None req-be4f8ff9-6822-4cf7-97fd-34bedac59cf2 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1623.977970] env[61356]: DEBUG nova.network.neutron [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1623.982695] env[61356]: DEBUG nova.compute.manager [None req-be4f8ff9-6822-4cf7-97fd-34bedac59cf2 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] [instance: 1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9] Instance disappeared before build. 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1624.020968] env[61356]: DEBUG oslo_concurrency.lockutils [None req-be4f8ff9-6822-4cf7-97fd-34bedac59cf2 tempest-ImagesTestJSON-1579121251 tempest-ImagesTestJSON-1579121251-project-member] Lock "1bbe9786-b3a8-4a8d-8db0-2c6fb87429a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.173s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.030800] env[61356]: DEBUG nova.compute.manager [None req-1279fcb8-504e-4f88-a854-a11f6c5e341b tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: aa69cd0e-f97a-4c63-8887-9998847329ec] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1624.063608] env[61356]: DEBUG nova.compute.manager [None req-1279fcb8-504e-4f88-a854-a11f6c5e341b tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: aa69cd0e-f97a-4c63-8887-9998847329ec] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1624.098885] env[61356]: DEBUG oslo_concurrency.lockutils [None req-1279fcb8-504e-4f88-a854-a11f6c5e341b tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "aa69cd0e-f97a-4c63-8887-9998847329ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.996s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.113192] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1624.131547] env[61356]: DEBUG nova.network.neutron [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.141396] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Releasing lock "refresh_cache-3bc9faac-35bb-4303-937f-3682771e9258" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.141863] env[61356]: DEBUG nova.compute.manager [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1624.142017] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1624.142572] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53f2c6c8-8247-418f-b850-80c284e668df {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.153845] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01edaedb-6692-4856-a769-39d207df399b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.184078] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3bc9faac-35bb-4303-937f-3682771e9258 could not be found. [ 1624.184287] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1624.184463] env[61356]: INFO nova.compute.manager [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1624.184702] env[61356]: DEBUG oslo.service.loopingcall [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.184923] env[61356]: DEBUG nova.compute.manager [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1624.185027] env[61356]: DEBUG nova.network.neutron [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1624.187180] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.187410] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.188805] env[61356]: INFO nova.compute.claims [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1624.210236] env[61356]: DEBUG nova.network.neutron [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1624.217196] env[61356]: DEBUG nova.network.neutron [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.227028] env[61356]: INFO nova.compute.manager [-] [instance: 3bc9faac-35bb-4303-937f-3682771e9258] Took 0.04 seconds to deallocate network for instance. 
[ 1624.326542] env[61356]: DEBUG oslo_concurrency.lockutils [None req-bd8d8148-e588-4717-8c5b-f24ee274b287 tempest-ServerActionsTestOtherB-1406106026 tempest-ServerActionsTestOtherB-1406106026-project-member] Lock "3bc9faac-35bb-4303-937f-3682771e9258" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.383s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.411939] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb3fef2-09db-4be3-b962-e142028b2c7c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.419785] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d29141-17b2-4223-945d-8c8e75fd4fc8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.448797] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0578183-369a-4903-bc42-a739e6c3e3c4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.455743] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b051b40e-6660-475d-8eba-a03079dccb7c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.467981] env[61356]: DEBUG nova.compute.provider_tree [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.476636] env[61356]: DEBUG nova.scheduler.client.report [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.489458] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.489873] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1624.522699] env[61356]: DEBUG nova.compute.utils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1624.524133] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1624.524303] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1624.533482] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1624.587071] env[61356]: DEBUG nova.policy [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcdd63fc241b43d8af563edd7d04d6f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa7d6c07edd8426495a7da1c85cefebb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1624.594090] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1624.619809] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1624.619967] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1624.620116] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1624.620261] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1624.620408] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1624.620553] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1624.620878] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1624.620952] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1624.621094] env[61356]: DEBUG 
nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1624.621262] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1624.621431] env[61356]: DEBUG nova.virt.hardware [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1624.622296] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca10151-63ce-4522-8b0e-f0fd1bec043f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.629406] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389c0501-b873-4645-a624-b5943d3c2f8a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.895210] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Successfully created port: 61a59475-3e30-4307-82ad-f74b2722db6b {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1625.524137] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Successfully updated port: 61a59475-3e30-4307-82ad-f74b2722db6b {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1625.538660] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "refresh_cache-4f103733-7586-4241-a766-cf13bef9e435" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.538813] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquired lock "refresh_cache-4f103733-7586-4241-a766-cf13bef9e435" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.538967] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1625.580549] env[61356]: DEBUG nova.network.neutron [None 
req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1625.746159] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Updating instance_info_cache with network_info: [{"id": "61a59475-3e30-4307-82ad-f74b2722db6b", "address": "fa:16:3e:51:97:1c", "network": {"id": "140aa7bc-d684-41f6-b7a3-673f0a30aed5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-463334014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa7d6c07edd8426495a7da1c85cefebb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a59475-3e", "ovs_interfaceid": "61a59475-3e30-4307-82ad-f74b2722db6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.760236] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Releasing lock "refresh_cache-4f103733-7586-4241-a766-cf13bef9e435" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.760548] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance network_info: |[{"id": "61a59475-3e30-4307-82ad-f74b2722db6b", "address": "fa:16:3e:51:97:1c", "network": {"id": "140aa7bc-d684-41f6-b7a3-673f0a30aed5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-463334014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa7d6c07edd8426495a7da1c85cefebb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a59475-3e", "ovs_interfaceid": 
"61a59475-3e30-4307-82ad-f74b2722db6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1625.760953] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:97:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2928baf1-3efb-4205-a786-d9783e51f699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61a59475-3e30-4307-82ad-f74b2722db6b', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.768601] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Creating folder: Project (aa7d6c07edd8426495a7da1c85cefebb). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1625.769121] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc74016d-b69b-4088-802a-67c919ebeb4c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.780350] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Created folder: Project (aa7d6c07edd8426495a7da1c85cefebb) in parent group-v846926. [ 1625.780533] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Creating folder: Instances. Parent ref: group-v847026. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1625.780744] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-584ab2c9-ea42-41b9-9e60-9cc2c78e8c41 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.789499] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Created folder: Instances in parent group-v847026. [ 1625.789717] env[61356]: DEBUG oslo.service.loopingcall [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.789888] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1625.790082] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-999776b4-afda-4022-8bbe-37ecee840be8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.808641] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1625.808641] env[61356]: value = "task-4302639" [ 1625.808641] env[61356]: _type = "Task" [ 1625.808641] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.816193] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302639, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.862619] env[61356]: DEBUG nova.compute.manager [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Received event network-vif-plugged-61a59475-3e30-4307-82ad-f74b2722db6b {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1625.862883] env[61356]: DEBUG oslo_concurrency.lockutils [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] Acquiring lock "4f103733-7586-4241-a766-cf13bef9e435-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.863135] env[61356]: DEBUG oslo_concurrency.lockutils [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] Lock "4f103733-7586-4241-a766-cf13bef9e435-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.863310] env[61356]: DEBUG oslo_concurrency.lockutils [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] Lock "4f103733-7586-4241-a766-cf13bef9e435-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.863507] env[61356]: DEBUG nova.compute.manager [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] No waiting events found dispatching network-vif-plugged-61a59475-3e30-4307-82ad-f74b2722db6b {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1625.863638] env[61356]: WARNING nova.compute.manager [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Received unexpected event network-vif-plugged-61a59475-3e30-4307-82ad-f74b2722db6b for instance with vm_state building and task_state spawning. 
[ 1625.863800] env[61356]: DEBUG nova.compute.manager [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Received event network-changed-61a59475-3e30-4307-82ad-f74b2722db6b {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1625.863954] env[61356]: DEBUG nova.compute.manager [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Refreshing instance network info cache due to event network-changed-61a59475-3e30-4307-82ad-f74b2722db6b. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1625.864352] env[61356]: DEBUG oslo_concurrency.lockutils [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] Acquiring lock "refresh_cache-4f103733-7586-4241-a766-cf13bef9e435" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.864501] env[61356]: DEBUG oslo_concurrency.lockutils [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] Acquired lock "refresh_cache-4f103733-7586-4241-a766-cf13bef9e435" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.864664] env[61356]: DEBUG nova.network.neutron [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Refreshing network info cache for port 61a59475-3e30-4307-82ad-f74b2722db6b {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1626.129909] env[61356]: DEBUG nova.network.neutron [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Updated VIF entry in instance network info cache for port 61a59475-3e30-4307-82ad-f74b2722db6b. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1626.130290] env[61356]: DEBUG nova.network.neutron [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Updating instance_info_cache with network_info: [{"id": "61a59475-3e30-4307-82ad-f74b2722db6b", "address": "fa:16:3e:51:97:1c", "network": {"id": "140aa7bc-d684-41f6-b7a3-673f0a30aed5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-463334014-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa7d6c07edd8426495a7da1c85cefebb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61a59475-3e", "ovs_interfaceid": "61a59475-3e30-4307-82ad-f74b2722db6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.139719] env[61356]: DEBUG oslo_concurrency.lockutils [req-16c8f73e-4df7-4048-b43a-7720ec8f71f3 req-61d4ecf7-6577-4f75-924e-4143d9645f5c service nova] Releasing lock "refresh_cache-4f103733-7586-4241-a766-cf13bef9e435" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.318839] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302639, 'name': CreateVM_Task, 'duration_secs': 0.291053} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.319608] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1626.319894] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.320072] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.320398] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1626.320638] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31af0d41-643e-4c78-aeb0-61be26739748 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.324879] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Waiting for the task: (returnval){ [ 1626.324879] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521254ba-3cf9-5cef-903a-a765e31a80d6" [ 1626.324879] env[61356]: _type = "Task" [ 1626.324879] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.331852] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]521254ba-3cf9-5cef-903a-a765e31a80d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.835421] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.835788] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1626.835890] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.041602] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "4f103733-7586-4241-a766-cf13bef9e435" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.758776] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1645.770441] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.770678] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.770850] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.771016] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1645.772245] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2ad531-f502-4196-927e-6a465f7d315d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.781157] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaea9fca-b0bf-498b-96fe-bdd19780238f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.795141] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f348b3-3b63-49a5-b081-b3709708122a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.802957] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c041296a-f436-48bd-883d-0a7354e76242 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.832213] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180673MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1645.832373] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.832568] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.917792] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7d397e6b-a49f-410a-afcc-23c4c462024b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.917992] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.918179] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.918370] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.918526] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.918674] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.918819] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.918961] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.919117] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.919273] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1645.932453] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.943756] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.955512] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 344f33b2-9314-478a-b714-003090c9c342 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.966719] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.976740] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ba07e43d-c654-4202-a392-743006b3e6fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1645.976969] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1645.977130] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1646.159960] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93664943-955a-4eca-9635-855c5e0462e6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.167492] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32dca4c-0a7d-4958-bb61-4abb60622d98 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.196484] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e11e858-37d6-46ee-949c-d395b0f24fc6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.203551] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba00055-dcd5-47e6-a27c-37f73a1f7dc9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.216315] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.226720] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1646.242051] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1646.242243] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.410s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.242826] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.753968] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.757657] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1651.757936] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.758734] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.758277] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.758564] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.758727] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1656.759801] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.760113] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1656.760113] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1656.782060] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782163] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782304] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782435] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782552] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782668] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782783] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.782897] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.783015] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.783134] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1656.783251] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1672.035032] env[61356]: WARNING oslo_vmware.rw_handles [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.035032] env[61356]: ERROR oslo_vmware.rw_handles [ 1672.035032] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1672.036758] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1672.037009] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Copying Virtual Disk [datastore2] vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/e4f98936-94d7-4980-9fa5-0c43bc69f753/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1672.037296] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3b1078e-a3a5-4343-b47d-594a949440dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.046947] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Waiting for the task: (returnval){ [ 
1672.046947] env[61356]: value = "task-4302640" [ 1672.046947] env[61356]: _type = "Task" [ 1672.046947] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.055094] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Task: {'id': task-4302640, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.557119] env[61356]: DEBUG oslo_vmware.exceptions [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1672.557414] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.557979] env[61356]: ERROR nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1672.557979] env[61356]: Faults: ['InvalidArgument'] [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Traceback (most recent call last): [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] yield resources [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self.driver.spawn(context, instance, image_meta, [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self._fetch_image_if_missing(context, vi) [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] 
image_cache(vi, tmp_image_ds_loc) [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] vm_util.copy_virtual_disk( [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] session._wait_for_task(vmdk_copy_task) [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] return self.wait_for_task(task_ref) [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] return evt.wait() [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] result = hub.switch() [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] return self.greenlet.switch() [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self.f(*self.args, **self.kw) [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] raise exceptions.translate_fault(task_info.error) [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Faults: ['InvalidArgument'] [ 1672.557979] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] [ 1672.558999] env[61356]: INFO nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Terminating instance [ 1672.559914] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 
tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.560112] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1672.560391] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d2972d0-190a-4deb-ae7d-16810568fee2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.562684] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1672.562870] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1672.563629] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f48d537-f899-4f30-a48a-7aaece2661c5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.569972] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1672.570194] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adb5bc99-2b77-4fe4-b33a-d2e875890d82 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.572183] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1672.572354] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1672.573290] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba32cb04-f287-4047-8ae6-ee62e9c634ae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.577945] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Waiting for the task: (returnval){ [ 1672.577945] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5217c0d9-7099-cc4b-006c-cd698680beaa" [ 1672.577945] env[61356]: _type = "Task" [ 1672.577945] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.586742] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5217c0d9-7099-cc4b-006c-cd698680beaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.640027] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1672.640283] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1672.640468] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Deleting the datastore file [datastore2] 7d397e6b-a49f-410a-afcc-23c4c462024b {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1672.640741] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06ba3057-3fc8-4243-8933-867d4997fac9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.646613] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Waiting for the task: (returnval){ [ 1672.646613] env[61356]: value = "task-4302642" [ 1672.646613] env[61356]: _type = "Task" [ 1672.646613] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.654288] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Task: {'id': task-4302642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.088179] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1673.088514] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Creating directory with path [datastore2] vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.088649] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2e5ce5e-0104-4b1f-b3f7-769e7e137a1f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.099475] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Created directory with path [datastore2] vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.099652] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Fetch image to [datastore2] vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1673.099819] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1673.100528] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119bfa49-338a-4298-8f27-3074c045c4b7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.106664] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115003cf-6823-46f7-a26c-836dc2bf0d4c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.115310] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee89ed17-c049-49ad-8464-ba068419d9a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.146189] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74120a1c-8890-41d6-9c5a-77f22d86379e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.155621] env[61356]: DEBUG oslo_vmware.api [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Task: {'id': task-4302642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08226} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.156714] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.157284] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1673.157284] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1673.157284] env[61356]: INFO nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Took 0.59 seconds to destroy the instance on the hypervisor. 
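The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same shape: submit a vCenter task, then poll its TaskInfo until it reports success or error (the "progress is 0%" lines followed by "completed successfully" with a duration_secs). Below is a minimal, self-contained sketch of that poll loop. It is illustrative only, not oslo.vmware's implementation: get_task_info is a hypothetical callable standing in for the PropertyCollector read of the task object, and the state names mirror the vSphere TaskInfo states (queued/running/success/error).

    import time

    class TaskFailed(Exception):
        """Raised when the remote task finishes in the 'error' state."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task leaves the 'queued'/'running' states -- the
        # same loop shape behind the "progress is 0%" ... "completed
        # successfully" entries in the log above.
        while True:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info         # caller reads e.g. info['duration_secs']
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown fault'))
            time.sleep(poll_interval)

    # Usage with a canned task that succeeds on the third poll (duration
    # taken from task-4302642 above, for flavor):
    _states = iter([{'state': 'queued'},
                    {'state': 'running', 'progress': 40},
                    {'state': 'success', 'duration_secs': 0.08226}])
    print(wait_for_task(lambda: next(_states), poll_interval=0))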
[ 1673.159030] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4d0d7ed9-5a8f-4963-8f3d-0de2d4bd27bf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.160889] env[61356]: DEBUG nova.compute.claims [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1673.161047] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.161262] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.182500] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1673.234511] env[61356]: DEBUG oslo_vmware.rw_handles [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1673.292375] env[61356]: DEBUG oslo_vmware.rw_handles [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1673.292545] env[61356]: DEBUG oslo_vmware.rw_handles [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1673.441353] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e9c23c-05da-4611-a2bf-5cb056462603 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.450031] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6790693b-3505-46fc-90c0-f2ddfc360c8f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.478534] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abb5fc8-572a-4517-be14-3faaff2c366e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.485709] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8350450f-88cb-460b-846c-ac24d414d559 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.498512] env[61356]: DEBUG nova.compute.provider_tree [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.509088] env[61356]: DEBUG nova.scheduler.client.report [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1673.523687] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.362s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.523825] env[61356]: ERROR nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.523825] env[61356]: Faults: ['InvalidArgument'] [ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Traceback (most recent call last): [ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1673.523825] env[61356]: ERROR 
nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self.driver.spawn(context, instance, image_meta,
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self._fetch_image_if_missing(context, vi)
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] image_cache(vi, tmp_image_ds_loc)
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] vm_util.copy_virtual_disk(
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] session._wait_for_task(vmdk_copy_task)
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] return self.wait_for_task(task_ref)
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] return evt.wait()
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] result = hub.switch()
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] return self.greenlet.switch()
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] self.f(*self.args, **self.kw)
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] raise exceptions.translate_fault(task_info.error)
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Faults: ['InvalidArgument']
[ 1673.523825] env[61356]: ERROR nova.compute.manager [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b]
[ 1673.524636] env[61356]: DEBUG nova.compute.utils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1673.526022] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Build of instance 7d397e6b-a49f-410a-afcc-23c4c462024b was re-scheduled: A specified parameter was not correct: fileType
[ 1673.526022] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1673.529258] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1673.529258] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}}
[ 1673.529258] env[61356]: DEBUG nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1673.529258] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1673.954040] env[61356]: DEBUG nova.network.neutron [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1673.965947] env[61356]: INFO nova.compute.manager [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Took 0.44 seconds to deallocate network for instance.
[ 1674.067239] env[61356]: INFO nova.scheduler.client.report [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Deleted allocations for instance 7d397e6b-a49f-410a-afcc-23c4c462024b
[ 1674.091978] env[61356]: DEBUG oslo_concurrency.lockutils [None req-557f329e-a3a4-498b-8e81-32b55bafe3b8 tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 627.941s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1674.093169] env[61356]: DEBUG oslo_concurrency.lockutils [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 432.650s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1674.093425] env[61356]: DEBUG oslo_concurrency.lockutils [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Acquiring lock "7d397e6b-a49f-410a-afcc-23c4c462024b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1674.093685] env[61356]: DEBUG oslo_concurrency.lockutils [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1674.093855] env[61356]: DEBUG oslo_concurrency.lockutils [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1674.096275] env[61356]: INFO nova.compute.manager [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Terminating instance
[ 1674.097885] env[61356]: DEBUG nova.compute.manager [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1674.098099] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1674.098649] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-43fbcba2-fe6b-4520-830d-7fdac0966e5b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.109016] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37ef1b2-3c54-433b-897e-c11b0dbeba04 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.119669] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1674.140100] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d397e6b-a49f-410a-afcc-23c4c462024b could not be found.
[ 1674.140300] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1674.140475] env[61356]: INFO nova.compute.manager [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Took 0.04 seconds to destroy the instance on the hypervisor.
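Annotation: the traceback above ends inside oslo.vmware's task poller. copy_virtual_disk submits a CopyVirtualDisk_Task, and _poll_task re-reads the task state on a looping call until it is terminal, translating a vCenter error (here fault 'InvalidArgument' on the fileType parameter) into a VimFaultException for the waiting greenthread. A minimal sketch of that poll-until-done pattern follows; it is an illustration, not oslo.vmware's actual code, and get_task_info is a hypothetical stand-in for the SOAP task read.

import time

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(get_task_info, interval=0.5):
    # Poll until the task reaches a terminal state, as _poll_task does.
    while True:
        info = get_task_info()            # e.g. {'state': 'running', ...}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # Translate the server-side fault into a Python exception.
            raise VimFaultException(info.get('faults', []),
                                    info.get('message', 'task failed'))
        time.sleep(interval)              # 'queued'/'running': keep polling

# Example: a task that fails the way this log shows.
states = iter([{'state': 'running'},
               {'state': 'error',
                'faults': ['InvalidArgument'],
                'message': 'A specified parameter was not correct: fileType'}])
try:
    wait_for_task(lambda: next(states), interval=0)
except VimFaultException as e:
    print(e, e.fault_list)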
[ 1674.140712] env[61356]: DEBUG oslo.service.loopingcall [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1674.140935] env[61356]: DEBUG nova.compute.manager [-] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1674.141040] env[61356]: DEBUG nova.network.neutron [-] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1674.164038] env[61356]: DEBUG nova.network.neutron [-] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1674.171536] env[61356]: INFO nova.compute.manager [-] [instance: 7d397e6b-a49f-410a-afcc-23c4c462024b] Took 0.03 seconds to deallocate network for instance.
[ 1674.173686] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1674.173921] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1674.175594] env[61356]: INFO nova.compute.claims [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1674.256061] env[61356]: DEBUG oslo_concurrency.lockutils [None req-37f902ea-9647-438b-b8c9-f0df64be2d1a tempest-ServerPasswordTestJSON-67351418 tempest-ServerPasswordTestJSON-67351418-project-member] Lock "7d397e6b-a49f-410a-afcc-23c4c462024b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.162s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1674.380542] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9892f98-1c96-4859-b3c0-68d28ce8fd77 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.388161] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91dbbb3-d107-438f-8c12-893d5f5cc647 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.418537] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e6cd8f-52b8-4654-a891-751a9cd3299b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.425197] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c45e22b-6715-47a8-a1ac-1e5aa97a9ff6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.437901] env[61356]: DEBUG nova.compute.provider_tree [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1674.446376] env[61356]: DEBUG nova.scheduler.client.report [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1674.460867] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1674.461365] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1674.494173] env[61356]: DEBUG nova.compute.utils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1674.495603] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
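Annotation: the inventory dict reported for provider cfb833eb-84f8-480d-866a-1ad2fb871484 is what bounds the claim that just succeeded. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio; a quick check of that arithmetic against the logged numbers (illustration only):

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 210, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 210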
[ 1674.495776] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1674.507787] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1674.571764] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1674.574740] env[61356]: DEBUG nova.policy [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a03361c3e7a43489d130de44b2c9ce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f203092fb2b4138bee8c200c4049a24', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1674.596459] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1674.596459] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1674.596651] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1674.596795] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1674.596937] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1674.597103] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1674.597310] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1674.597873] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1674.597873] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1674.597873] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1674.598029] env[61356]: DEBUG nova.virt.hardware [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1674.598824] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5eaef8-cf5e-4670-bb71-63416ec56fe2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1674.606684] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a05919-10a2-447f-9454-dafbd61e74dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1675.164734] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Successfully created port: 89f39c36-deca-491a-9be2-24592d224dca {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
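Annotation: the nova.virt.hardware lines walk from flavor/image limits (all unset, so the 65536 defaults apply) to the single viable topology for a 1-vCPU guest. A simplified enumeration of (sockets, cores, threads) triples whose product equals the vCPU count, in the spirit of _get_possible_cpu_topologies but not Nova's exact code:

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Every factorization sockets*cores*threads == vcpus within the limits.
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        rest = vcpus // s
        for c in range(1, min(rest, max_cores) + 1):
            if rest % c:
                continue
            t = rest // c
            if t <= max_threads:
                yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- matches the log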
[ 1675.774277] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Successfully updated port: 89f39c36-deca-491a-9be2-24592d224dca {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1675.786479] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "refresh_cache-bc3bcf07-24bf-4cf6-874e-69e24b615458" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1675.786632] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired lock "refresh_cache-bc3bcf07-24bf-4cf6-874e-69e24b615458" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1675.786779] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1675.832932] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1676.001227] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Updating instance_info_cache with network_info: [{"id": "89f39c36-deca-491a-9be2-24592d224dca", "address": "fa:16:3e:69:05:0a", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f39c36-de", "ovs_interfaceid": "89f39c36-deca-491a-9be2-24592d224dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1676.012757] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Releasing lock "refresh_cache-bc3bcf07-24bf-4cf6-874e-69e24b615458" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1676.013044] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance network_info: |[{"id": "89f39c36-deca-491a-9be2-24592d224dca", "address": "fa:16:3e:69:05:0a", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f39c36-de", "ovs_interfaceid": "89f39c36-deca-491a-9be2-24592d224dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1676.013449] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:05:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89f39c36-deca-491a-9be2-24592d224dca', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
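Annotation: the instance_info_cache payload above is the Neutron-derived network_info that Nova stores as JSON, and the "Instance VIF info" line shows the handful of fields the VMware driver actually pulls out of it. Extracting those fields is a plain dict walk; a small illustrative extractor over a trimmed copy of the structure shown:

network_info = [{
    "id": "89f39c36-deca-491a-9be2-24592d224dca",
    "address": "fa:16:3e:69:05:0a",
    "network": {
        "id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.238", "type": "fixed"}],
        }],
    },
    "details": {"nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f"},
    "vnic_type": "normal",
}]

for vif in network_info:
    # Collect the fixed IPs across all subnets of the VIF's network.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips,
          vif["details"]["nsx-logical-switch-id"])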
[ 1676.021898] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating folder: Project (1f203092fb2b4138bee8c200c4049a24). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1676.022424] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0b2683a-85a5-4e4d-aee4-aee1298ad28c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1676.034948] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Created folder: Project (1f203092fb2b4138bee8c200c4049a24) in parent group-v846926.
[ 1676.035168] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating folder: Instances. Parent ref: group-v847029. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1676.035421] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b30c8c93-86fb-428c-8207-947bc2a531c0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1676.044494] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Created folder: Instances in parent group-v847029.
[ 1676.044726] env[61356]: DEBUG oslo.service.loopingcall [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1676.044916] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1676.045156] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5e3d738-c25e-4c53-9742-5b2801141d70 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1676.064695] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1676.064695] env[61356]: value = "task-4302645"
[ 1676.064695] env[61356]: _type = "Task"
[ 1676.064695] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1676.072449] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302645, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1676.169722] env[61356]: DEBUG nova.compute.manager [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Received event network-vif-plugged-89f39c36-deca-491a-9be2-24592d224dca {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1676.169722] env[61356]: DEBUG oslo_concurrency.lockutils [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] Acquiring lock "bc3bcf07-24bf-4cf6-874e-69e24b615458-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1676.169722] env[61356]: DEBUG oslo_concurrency.lockutils [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1676.170115] env[61356]: DEBUG oslo_concurrency.lockutils [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1676.170115] env[61356]: DEBUG nova.compute.manager [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] No waiting events found dispatching network-vif-plugged-89f39c36-deca-491a-9be2-24592d224dca {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1676.170177] env[61356]: WARNING nova.compute.manager [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Received unexpected event network-vif-plugged-89f39c36-deca-491a-9be2-24592d224dca for instance with vm_state building and task_state spawning.
[ 1676.170323] env[61356]: DEBUG nova.compute.manager [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Received event network-changed-89f39c36-deca-491a-9be2-24592d224dca {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}}
[ 1676.170513] env[61356]: DEBUG nova.compute.manager [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Refreshing instance network info cache due to event network-changed-89f39c36-deca-491a-9be2-24592d224dca. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}}
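Annotation: the network-vif-plugged event arrives while the instance is still spawning, so pop_instance_event finds no registered waiter and the manager logs the event as unexpected. The underlying pattern is an event registry keyed by instance and event name: the spawning thread registers interest and blocks, and the Neutron-triggered handler pops and signals the event. A minimal stand-in using threading (Nova does this with eventlet and more bookkeeping):

import threading

class InstanceEvents:
    def __init__(self):
        self._events = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, event_name):
        # Called by the spawning thread before it starts waiting.
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        # Called by the external-event handler; None means "unexpected event".
        with self._lock:
            return self._events.pop((instance_uuid, event_name), None)

events = InstanceEvents()
uuid = 'bc3bcf07-24bf-4cf6-874e-69e24b615458'
waiter = events.prepare(uuid, 'network-vif-plugged-89f39c36')
ev = events.pop(uuid, 'network-vif-plugged-89f39c36')
if ev:
    ev.set()    # unblocks whoever is doing waiter.wait(timeout)
else:
    print('No waiting events found; log a warning as the manager does')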
[ 1676.170717] env[61356]: DEBUG oslo_concurrency.lockutils [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] Acquiring lock "refresh_cache-bc3bcf07-24bf-4cf6-874e-69e24b615458" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1676.170855] env[61356]: DEBUG oslo_concurrency.lockutils [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] Acquired lock "refresh_cache-bc3bcf07-24bf-4cf6-874e-69e24b615458" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1676.171058] env[61356]: DEBUG nova.network.neutron [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Refreshing network info cache for port 89f39c36-deca-491a-9be2-24592d224dca {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1676.439192] env[61356]: DEBUG nova.network.neutron [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Updated VIF entry in instance network info cache for port 89f39c36-deca-491a-9be2-24592d224dca. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1676.439553] env[61356]: DEBUG nova.network.neutron [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Updating instance_info_cache with network_info: [{"id": "89f39c36-deca-491a-9be2-24592d224dca", "address": "fa:16:3e:69:05:0a", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89f39c36-de", "ovs_interfaceid": "89f39c36-deca-491a-9be2-24592d224dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1676.448913] env[61356]: DEBUG oslo_concurrency.lockutils [req-345847a5-d885-445a-be36-6109e9da38ca req-17ea83bb-58d5-4931-a0f3-e6a79046df5a service nova] Releasing lock "refresh_cache-bc3bcf07-24bf-4cf6-874e-69e24b615458" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1676.574263] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302645, 'name': CreateVM_Task, 'duration_secs': 0.291506} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1676.574436] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1676.575127] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1676.575299] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1676.575635] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1676.575913] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64de3cea-f7b7-47dd-a85a-9f63b400764d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1676.580065] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){
[ 1676.580065] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52b203d2-d316-94e0-187d-7d68f2c497eb"
[ 1676.580065] env[61356]: _type = "Task"
[ 1676.580065] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1676.587340] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52b203d2-d316-94e0-187d-7d68f2c497eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
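Annotation: the lock names here are datastore paths. Compute serializes work on the cached image directory ("devstack-image-cache_base/<image-id>") so only one request fetches or converts a given image at a time; later the same pattern is applied to the .vmdk inside it. A simplified per-name lock registry in the spirit of oslo.concurrency's named locks (an illustration, not its implementation):

import threading
from contextlib import contextmanager

_locks = {}
_registry_lock = threading.Lock()

@contextmanager
def named_lock(name):
    # One shared Lock object per name, created on first use.
    with _registry_lock:
        lock = _locks.setdefault(name, threading.Lock())
    lock.acquire()
    try:
        yield
    finally:
        lock.release()

cache_dir = "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a"
with named_lock(cache_dir):
    # fetch/convert the image; concurrent spawns of the same image block here
    pass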
[ 1677.091320] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1677.091640] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1677.091894] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1693.170393] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1702.759386] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1705.767020] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1705.778870] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1705.779099] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1705.779271] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1705.779424] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1705.780510] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d89b49b-8ace-4ff5-b147-2d70ab1dfbd6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1705.789273] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea66f54-977c-41f7-8c87-b7794c4b82c2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1705.802839] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d101442-1b27-4f35-afda-a24d6b0a0a5b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1705.809092] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d613e90f-a49a-460a-b314-6c61209f1ed8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1705.837472] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180684MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1705.837615] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1705.837803] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1705.910212] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.910357] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.910485] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
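Annotation: update_available_resource then audits placement allocations against the instances the host actually manages: allocations for actively managed instances are kept, while allocations for instances that are scheduled here but have yet to start are skipped rather than healed. A condensed sketch of that decision, illustrative rather than resource_tracker's code:

def audit_allocations(allocations, managed_uuids, scheduled_uuids):
    """allocations: {instance_uuid: {'resources': {...}}}"""
    for uuid, alloc in allocations.items():
        if uuid in managed_uuids:
            print(f"Instance {uuid} actively managed; keeping {alloc}")
        elif uuid in scheduled_uuids:
            print(f"Instance {uuid} scheduled but not started; skipping heal")
        else:
            print(f"Instance {uuid} unknown; allocation would be removed")

audit_allocations(
    {'bc3bcf07-24bf-4cf6-874e-69e24b615458':
         {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}},
     'd53189a7-249f-48ea-ab30-3ee6242e0dc0':
         {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}},
    managed_uuids={'bc3bcf07-24bf-4cf6-874e-69e24b615458'},
    scheduled_uuids={'d53189a7-249f-48ea-ab30-3ee6242e0dc0'},
)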
[ 1705.910605] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.910721] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.910835] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.910950] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.911076] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.911192] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.911307] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1705.922402] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1705.933955] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 344f33b2-9314-478a-b714-003090c9c342 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1705.943677] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1705.953426] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ba07e43d-c654-4202-a392-743006b3e6fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1705.953658] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1705.953821] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1706.112147] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05190e5c-7cd7-4626-a4ac-a26539791d80 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1706.119465] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7182c8b-26e5-47c1-96fa-3c6aa4140842 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1706.150942] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce85260-def9-470c-b339-bfaef1f07c62 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1706.157651] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec55b93-3a7f-42cc-ad07-99f68d346077 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1706.170294] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
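Annotation: the final resource view's numbers follow directly from the ten m1.nano-sized allocations listed above plus the reserved memory: each allocation claims 1 VCPU, 128 MB and 1 GB, and the MEMORY_MB inventory reserves 512 MB of host RAM. A quick check of that arithmetic:

instances = 10          # allocations the resource tracker listed
flavor = {'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1}   # m1.nano-sized
reserved_mb = 512       # from the MEMORY_MB inventory

used_ram = reserved_mb + instances * flavor['memory_mb']
used_disk = instances * flavor['disk_gb']
used_vcpus = instances * flavor['vcpus']
print(used_ram, used_disk, used_vcpus)   # 1792 10 10 -- matches the log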
[ 1706.178873] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1706.192055] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1706.192241] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.354s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1710.185051] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1711.753821] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1711.757410] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1713.758496] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1713.758786] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1713.758900] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1713.759058] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}}
[ 1715.758792] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1717.758701] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1717.759024] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}}
[ 1717.759024] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}}
[ 1717.779269] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1717.779421] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1717.779575] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1717.779660] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1717.779790] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1717.779918] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
[ 1717.780027] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}}
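Annotation: the periodic task lines come from oslo.service's task runner, which tracks per-task spacing and invokes whatever is due on each tick; note that _reclaim_queued_deletes short-circuits because reclaim_instance_interval <= 0. A toy scheduler showing only the spacing bookkeeping, a simplified stand-in rather than oslo.service's API:

import time

class PeriodicRunner:
    def __init__(self):
        self._tasks = []    # [name, fn, spacing_seconds, last_run]

    def register(self, name, fn, spacing):
        self._tasks.append([name, fn, spacing, 0.0])

    def run_due(self):
        now = time.monotonic()
        for task in self._tasks:
            name, fn, spacing, last = task
            if now - last >= spacing:
                print(f"Running periodic task {name}")
                fn()
                task[3] = now

reclaim_instance_interval = 0   # the CONF value this log reports

def _reclaim_queued_deletes():
    if reclaim_instance_interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")

runner = PeriodicRunner()
runner.register("ComputeManager._reclaim_queued_deletes",
                _reclaim_queued_deletes, spacing=60)
runner.run_due()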
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1717.780195] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1717.780278] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1717.780377] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1717.780496] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1717.780976] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.781132] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 1717.790041] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] There are 0 instances to clean {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 1720.197685] env[61356]: WARNING oslo_vmware.rw_handles [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1720.197685] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1720.197685] env[61356]: ERROR 
oslo_vmware.rw_handles [ 1720.198575] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1720.200392] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1720.200644] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Copying Virtual Disk [datastore2] vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/c5f9e142-8c44-454f-9fdd-0d1e2413b309/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1720.200927] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5f1b407-c7c4-4c27-9770-41552415b81d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.209557] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Waiting for the task: (returnval){ [ 1720.209557] env[61356]: value = "task-4302646" [ 1720.209557] env[61356]: _type = "Task" [ 1720.209557] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.216960] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Task: {'id': task-4302646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.720302] env[61356]: DEBUG oslo_vmware.exceptions [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1720.720533] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.721118] env[61356]: ERROR nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1720.721118] env[61356]: Faults: ['InvalidArgument'] [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Traceback (most recent call last): [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] yield resources [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self.driver.spawn(context, instance, image_meta, [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self._fetch_image_if_missing(context, vi) [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] image_cache(vi, tmp_image_ds_loc) [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] vm_util.copy_virtual_disk( [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] session._wait_for_task(vmdk_copy_task) [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] return self.wait_for_task(task_ref) [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] return evt.wait() [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] result = hub.switch() [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] return self.greenlet.switch() [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self.f(*self.args, **self.kw) [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] raise exceptions.translate_fault(task_info.error) [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Faults: ['InvalidArgument'] [ 1720.721118] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] [ 1720.722188] env[61356]: INFO nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Terminating instance [ 1720.723017] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.723262] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.723503] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b0044cfb-a45b-4dfd-8b66-affe11ee0d98 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.725644] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1720.725831] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1720.726550] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea54582d-4e7e-4e7e-a81c-0ddcb467c63f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.734403] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1720.734638] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d9c9756-1431-4126-bd8b-44539c099e27 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.736695] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.736863] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1720.737782] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79a22fbf-0e19-49f0-adc2-bf6457735549 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.742365] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 1720.742365] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52923c3b-dec8-c63b-1b92-486bd5c0d1f3" [ 1720.742365] env[61356]: _type = "Task" [ 1720.742365] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.756439] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1720.756697] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating directory with path [datastore2] vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.756918] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eabd2dea-fe74-4e13-a504-0cde3486d0a5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.777533] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Created directory with path [datastore2] vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.777733] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Fetch image to [datastore2] vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1720.777900] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1720.778671] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7e3c7d-219c-4ff2-adc4-1147c6a1bf57 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.785178] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1720.787202] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43543afc-c44a-4bf8-b7d6-8debeb66f4c8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.796277] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-615186f1-12dc-43c2-a01b-b9117fe4c010 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.830886] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8707b8fe-c30f-4b32-923f-4d33a7f893ce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.833457] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1720.833649] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1720.833820] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Deleting the datastore file [datastore2] ec2e64e5-9c48-46e1-9010-78eb2c52ede6 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1720.834322] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47f9b41e-e052-42c6-aae4-6a4563febc7d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.839492] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a5397239-7734-439f-83b5-4db943e1d286 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.841941] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Waiting for the task: (returnval){ [ 1720.841941] env[61356]: value = "task-4302648" [ 1720.841941] env[61356]: _type = "Task" [ 1720.841941] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.849286] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Task: {'id': task-4302648, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.861901] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1720.913290] env[61356]: DEBUG oslo_vmware.rw_handles [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1720.973153] env[61356]: DEBUG oslo_vmware.rw_handles [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1720.973342] env[61356]: DEBUG oslo_vmware.rw_handles [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1721.352693] env[61356]: DEBUG oslo_vmware.api [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Task: {'id': task-4302648, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069021} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.353212] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1721.353364] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1721.353631] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1721.353888] env[61356]: INFO nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1721.356066] env[61356]: DEBUG nova.compute.claims [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1721.356319] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.356607] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.574648] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103e1501-8012-44fd-b145-6a68fb27d788 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.582237] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3096e0fb-014c-4895-bbb0-78f8a968f20f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.615960] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810ed94c-1958-4269-8505-3ba66d44f928 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.622937] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e40c33-fe11-420a-97f4-624bd6bdbf05 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.635331] env[61356]: DEBUG nova.compute.provider_tree [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.642725] env[61356]: DEBUG nova.scheduler.client.report [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1721.661565] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.662109] env[61356]: ERROR nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.662109] env[61356]: Faults: ['InvalidArgument'] [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Traceback (most recent call last): [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self.driver.spawn(context, instance, image_meta, [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self._fetch_image_if_missing(context, vi) 
[ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] image_cache(vi, tmp_image_ds_loc) [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] vm_util.copy_virtual_disk( [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] session._wait_for_task(vmdk_copy_task) [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] return self.wait_for_task(task_ref) [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] return evt.wait() [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] result = hub.switch() [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] return self.greenlet.switch() [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] self.f(*self.args, **self.kw) [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] raise exceptions.translate_fault(task_info.error) [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Faults: ['InvalidArgument'] [ 1721.662109] env[61356]: ERROR nova.compute.manager [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] [ 1721.663585] env[61356]: DEBUG nova.compute.utils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b 
tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1721.664412] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Build of instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 was re-scheduled: A specified parameter was not correct: fileType [ 1721.664412] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1721.664766] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1721.664932] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1721.665097] env[61356]: DEBUG nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1721.665259] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1722.000749] env[61356]: DEBUG nova.network.neutron [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.011580] env[61356]: INFO nova.compute.manager [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Took 0.35 seconds to deallocate network for instance. 
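The failed CopyVirtualDisk_Task above illustrates the poll-and-translate pattern visible throughout this trace: the driver submits a vCenter task, _poll_task loops on the task state (logging "progress is 0%"), and a task-level fault ("A specified parameter was not correct: fileType") is re-raised in Python as VimFaultException, which is what sends the build back for rescheduling. A minimal sketch of that loop follows; session.invoke_api and the TaskInfo fields here are simplified stand-ins for illustration, not the exact oslo.vmware signatures.

    # Minimal sketch (not the actual oslo.vmware implementation) of the
    # wait-for-task loop seen in the log above.
    import time

    class VimFaultException(Exception):
        """Raised when a vCenter task finishes with a fault, e.g. InvalidArgument."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll task_ref until it completes, mirroring the _poll_task records."""
        while True:
            info = session.invoke_api('TaskInfo', task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # The CopyVirtualDisk_Task above ended on this branch with
                # "A specified parameter was not correct: fileType".
                raise VimFaultException(info.error.faults, info.error.message)
            # still 'queued' or 'running': report progress and retry,
            # like the "progress is 0%" lines in this log
            print(f"Task {info.key} progress is {info.progress}%")
            time.sleep(poll_interval)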
[ 1722.111349] env[61356]: INFO nova.scheduler.client.report [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Deleted allocations for instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 [ 1722.136916] env[61356]: DEBUG oslo_concurrency.lockutils [None req-5afd2e34-00e6-441c-b6af-d6b5ac977b0b tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 576.505s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.138112] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.180s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.138344] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Acquiring lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.138552] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.138720] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.140875] env[61356]: INFO nova.compute.manager [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Terminating instance [ 1722.142575] env[61356]: DEBUG nova.compute.manager [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1722.142775] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1722.143435] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56313160-e1dc-4582-89f5-d4d10e494d00 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.153515] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c38abbe-483a-4421-91f2-092ef404811a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.166209] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1722.182061] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec2e64e5-9c48-46e1-9010-78eb2c52ede6 could not be found. [ 1722.182209] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1722.182391] env[61356]: INFO nova.compute.manager [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1722.182628] env[61356]: DEBUG oslo.service.loopingcall [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.182857] env[61356]: DEBUG nova.compute.manager [-] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1722.182951] env[61356]: DEBUG nova.network.neutron [-] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1722.210766] env[61356]: DEBUG nova.network.neutron [-] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.212666] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.212925] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.214385] env[61356]: INFO nova.compute.claims [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1722.219231] env[61356]: INFO nova.compute.manager [-] [instance: ec2e64e5-9c48-46e1-9010-78eb2c52ede6] Took 0.04 seconds to deallocate network for instance. 
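The inventory dict that the report client repeatedly logs as "not changed" encodes this node's schedulable capacity for placement. As a worked example using the exact figures from this log, and the standard placement formula usable = (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size bounding any single allocation:

    # Worked example (plain Python, not Nova code) from the logged inventory.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                      'min_unit': 1,   'max_unit': 16,  'step_size': 1},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'min_unit': 1,   'max_unit': 65530, 'step_size': 1},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0,
                      'min_unit': 1,   'max_unit': 107, 'step_size': 1},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {usable:g} schedulable, "
              f"single allocation <= {inv['max_unit']}")

    # VCPU: 192 schedulable (48 cores oversubscribed 4x)
    # MEMORY_MB: 196078 schedulable after the 512 MB reservation
    # DISK_GB: 210 schedulable, but no one instance may claim more than 107 GB

This is why the instance_claim and abort_instance_claim records above can succeed back-to-back without the provider tree changing: claims consume from this derived capacity, not from the inventory itself.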
[ 1722.313181] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8639f353-3cdb-4e01-acb5-767d24acf426 tempest-ServerDiagnosticsNegativeTest-1466726825 tempest-ServerDiagnosticsNegativeTest-1466726825-project-member] Lock "ec2e64e5-9c48-46e1-9010-78eb2c52ede6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.408612] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6154c0e-c78e-4179-8e9f-91015af8c6c3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.416209] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818950f7-4c9e-4410-a611-c3fdc0052603 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.445935] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f974bb-7c72-47b6-97c6-a3ca7e9169a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.452530] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529172bc-4aac-4141-96b9-2b902fbdadc2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.465259] env[61356]: DEBUG nova.compute.provider_tree [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.475425] env[61356]: DEBUG nova.scheduler.client.report [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.488550] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.489013] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1722.523744] env[61356]: DEBUG nova.compute.utils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1722.525159] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1722.525326] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1722.534873] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1722.595385] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1722.608283] env[61356]: DEBUG nova.policy [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a03361c3e7a43489d130de44b2c9ce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f203092fb2b4138bee8c200c4049a24', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1722.623362] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1722.623600] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1722.623808] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1722.623967] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1722.624123] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1722.624279] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1722.624484] 
env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1722.624641] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1722.624804] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1722.624989] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1722.625282] env[61356]: DEBUG nova.virt.hardware [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1722.626071] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45301b9e-0703-43db-8ca1-9817de8afbb7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.635125] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0b115d-4b32-4f30-93b0-42e6dd62bfd7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.917346] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Successfully created port: 80441e3c-8bd4-4a78-9594-a1edb5ca644d {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1723.528444] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Successfully updated port: 80441e3c-8bd4-4a78-9594-a1edb5ca644d {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1723.543717] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "refresh_cache-d53189a7-249f-48ea-ab30-3ee6242e0dc0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.543717] env[61356]: DEBUG oslo_concurrency.lockutils 
[None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired lock "refresh_cache-d53189a7-249f-48ea-ab30-3ee6242e0dc0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.543717] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1723.583834] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1723.761835] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Updating instance_info_cache with network_info: [{"id": "80441e3c-8bd4-4a78-9594-a1edb5ca644d", "address": "fa:16:3e:b4:ee:2b", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80441e3c-8b", "ovs_interfaceid": "80441e3c-8bd4-4a78-9594-a1edb5ca644d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.772665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Releasing lock "refresh_cache-d53189a7-249f-48ea-ab30-3ee6242e0dc0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.772974] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance network_info: |[{"id": "80441e3c-8bd4-4a78-9594-a1edb5ca644d", "address": "fa:16:3e:b4:ee:2b", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80441e3c-8b", "ovs_interfaceid": "80441e3c-8bd4-4a78-9594-a1edb5ca644d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1723.773404] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:ee:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80441e3c-8bd4-4a78-9594-a1edb5ca644d', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1723.781301] env[61356]: DEBUG oslo.service.loopingcall [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.781797] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1723.782112] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efd21b00-885a-41ce-8ba7-13d988d77c05 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.803719] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1723.803719] env[61356]: value = "task-4302649" [ 1723.803719] env[61356]: _type = "Task" [ 1723.803719] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.812428] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302649, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.056312] env[61356]: DEBUG nova.compute.manager [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Received event network-vif-plugged-80441e3c-8bd4-4a78-9594-a1edb5ca644d {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1724.056484] env[61356]: DEBUG oslo_concurrency.lockutils [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] Acquiring lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.056689] env[61356]: DEBUG oslo_concurrency.lockutils [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.056850] env[61356]: DEBUG oslo_concurrency.lockutils [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.057019] env[61356]: DEBUG nova.compute.manager [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] No waiting events found dispatching network-vif-plugged-80441e3c-8bd4-4a78-9594-a1edb5ca644d {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1724.057190] env[61356]: WARNING nova.compute.manager [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Received unexpected event network-vif-plugged-80441e3c-8bd4-4a78-9594-a1edb5ca644d for instance with vm_state building and task_state spawning. [ 1724.057347] env[61356]: DEBUG nova.compute.manager [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Received event network-changed-80441e3c-8bd4-4a78-9594-a1edb5ca644d {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1724.057537] env[61356]: DEBUG nova.compute.manager [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Refreshing instance network info cache due to event network-changed-80441e3c-8bd4-4a78-9594-a1edb5ca644d. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1724.057722] env[61356]: DEBUG oslo_concurrency.lockutils [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] Acquiring lock "refresh_cache-d53189a7-249f-48ea-ab30-3ee6242e0dc0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.057855] env[61356]: DEBUG oslo_concurrency.lockutils [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] Acquired lock "refresh_cache-d53189a7-249f-48ea-ab30-3ee6242e0dc0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.058019] env[61356]: DEBUG nova.network.neutron [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Refreshing network info cache for port 80441e3c-8bd4-4a78-9594-a1edb5ca644d {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1724.308560] env[61356]: DEBUG nova.network.neutron [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Updated VIF entry in instance network info cache for port 80441e3c-8bd4-4a78-9594-a1edb5ca644d. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1724.308894] env[61356]: DEBUG nova.network.neutron [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Updating instance_info_cache with network_info: [{"id": "80441e3c-8bd4-4a78-9594-a1edb5ca644d", "address": "fa:16:3e:b4:ee:2b", "network": {"id": "f27cb501-a49b-46e4-b4eb-54622b8a16c8", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.234", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b8f636a049984f5791c955c43f06725d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80441e3c-8b", "ovs_interfaceid": "80441e3c-8bd4-4a78-9594-a1edb5ca644d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.318127] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302649, 'name': CreateVM_Task, 'duration_secs': 0.274817} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.318770] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1724.319198] env[61356]: DEBUG oslo_concurrency.lockutils [req-1e0aa2ed-f893-4cac-8b58-b7ad80bbc26b req-488b8b26-f05a-4efe-bc9c-3a9a920894e7 service nova] Releasing lock "refresh_cache-d53189a7-249f-48ea-ab30-3ee6242e0dc0" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.319894] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.320066] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.320377] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1724.320825] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63b9ebb0-a902-4729-9564-ef87a2c42c11 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.325525] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 1724.325525] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5270e5b7-b66b-aea0-32d2-03bbabef44e3" [ 1724.325525] env[61356]: _type = "Task" [ 1724.325525] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.336891] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5270e5b7-b66b-aea0-32d2-03bbabef44e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.836090] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.836474] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1724.836537] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.987336] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.010730] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Getting list of instances from cluster (obj){ [ 1725.010730] env[61356]: value = "domain-c8" [ 1725.010730] env[61356]: _type = "ClusterComputeResource" [ 1725.010730] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1725.012053] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082b0e1a-c5a9-4591-8600-7340615987e6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.031265] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Got total of 10 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1725.031265] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031265] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid f8e4391b-3224-42a7-9962-eaaf2230d70d {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031265] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031265] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 2cc941f9-1184-4ce8-89b9-acb3b537b918 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 
1725.031673] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 83829089-1eb5-4337-9d67-71c81c9bbbcd {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031673] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 9958f378-1cab-49bf-8375-5e95c16401e4 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031673] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid d6141da0-7822-45dc-902a-c9fc3ee73fef {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031814] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 4f103733-7586-4241-a766-cf13bef9e435 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.031873] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid bc3bcf07-24bf-4cf6-874e-69e24b615458 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.032025] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid d53189a7-249f-48ea-ab30-3ee6242e0dc0 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 1725.032360] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.032601] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.032804] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.032995] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.033217] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.033413] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] 
Acquiring lock "9958f378-1cab-49bf-8375-5e95c16401e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.033606] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.033801] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "4f103733-7586-4241-a766-cf13bef9e435" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.034020] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.034289] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.758955] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.759343] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances with incomplete migration {{(pid=61356) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 1740.028567] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.746633] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "13efd018-f491-4979-87b8-0b4af68aaf1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.747010] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.768646] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1765.780568] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.780748] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.780919] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.781093] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1765.782247] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b2d744-9e26-4be0-b5b2-409654bf1c3d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.791033] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1bc19a-fa6b-4819-a8db-29ab93a017bf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.805027] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a65439b-bc53-4a26-9f9c-b31e2638b36a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.811153] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79090c96-6acf-4a19-824e-82c6fdc5fcfa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.839711] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180684MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1765.839832] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.840020] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.916058] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.916239] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f8e4391b-3224-42a7-9962-eaaf2230d70d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.916403] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.916544] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.916676] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.916796] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.916912] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.917040] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.917157] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.917269] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1765.949916] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1765.961604] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance ba07e43d-c654-4202-a392-743006b3e6fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1765.971570] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1765.971788] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1765.971933] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1766.157077] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f512e3-6a84-45fb-bcf9-323d59a8967f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.164314] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45921f34-749a-4d57-90bd-78d4d8fa901b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.193368] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6287315f-e06d-457e-8cfa-5e9d42202a50 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.200701] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e29991-ca0d-443c-a55a-c930d26dfa26 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.213215] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1766.221262] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1766.235596] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1766.235739] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.396s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.782156] env[61356]: WARNING oslo_vmware.rw_handles [None 
req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1770.782156] env[61356]: ERROR oslo_vmware.rw_handles [ 1770.782735] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1770.784941] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1770.785159] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Copying Virtual Disk [datastore2] vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/94f76e2b-8cae-4e3a-88e0-152eb717dd7d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1770.785842] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cba35a97-1c04-4b95-9927-801b942f750e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.794331] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 1770.794331] env[61356]: value = "task-4302650" [ 1770.794331] env[61356]: _type = "Task" [ 1770.794331] env[61356]: } to 
complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.801903] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': task-4302650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.304554] env[61356]: DEBUG oslo_vmware.exceptions [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1771.304850] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.305459] env[61356]: ERROR nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1771.305459] env[61356]: Faults: ['InvalidArgument'] [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Traceback (most recent call last): [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] yield resources [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self.driver.spawn(context, instance, image_meta, [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self._fetch_image_if_missing(context, vi) [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] image_cache(vi, tmp_image_ds_loc) [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: 
f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] vm_util.copy_virtual_disk( [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] session._wait_for_task(vmdk_copy_task) [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] return self.wait_for_task(task_ref) [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] return evt.wait() [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] result = hub.switch() [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] return self.greenlet.switch() [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self.f(*self.args, **self.kw) [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] raise exceptions.translate_fault(task_info.error) [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Faults: ['InvalidArgument'] [ 1771.305459] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] [ 1771.306734] env[61356]: INFO nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Terminating instance [ 1771.307478] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.307690] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1771.307924] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db3d17f6-cb6a-45bd-b85e-ae9f292c2715 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.310019] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1771.310267] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1771.310983] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1372dd8-906b-4579-8d80-512f13ee7008 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.317412] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1771.317618] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac5d70e1-cd4d-4ab7-9c96-e40ea0bb5c6e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.319643] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.319848] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1771.320771] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e336a85-c5a2-43b0-8f29-b062a05bb6af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.325238] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Waiting for the task: (returnval){ [ 1771.325238] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527a9f3f-47c0-e85b-b258-f721981bbeee" [ 1771.325238] env[61356]: _type = "Task" [ 1771.325238] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.332949] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527a9f3f-47c0-e85b-b258-f721981bbeee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.384018] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1771.384282] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1771.384534] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Deleting the datastore file [datastore2] f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1771.384808] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f23306e-965a-411a-9af4-9d1cb33d8a4c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.391383] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 1771.391383] env[61356]: value = "task-4302652" [ 1771.391383] env[61356]: _type = "Task" [ 1771.391383] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.399324] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': task-4302652, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.834855] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1771.835201] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Creating directory with path [datastore2] vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1771.835359] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b68cdd3-e381-4474-987f-4ed28e8dc52c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.846445] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Created directory with path [datastore2] vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.846609] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Fetch image to [datastore2] vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1771.846778] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1771.847495] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ab4746-42bc-45ff-add7-bc6f60b18548 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.853543] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a39f05b-101c-4668-aac9-e44df404c6af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.862073] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91be725b-15e8-4b2f-b6af-88db5925ac91 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.892225] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6cf3491b-d521-4f20-a186-25304fab6be1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.902990] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fe0c5763-fe00-4331-adcd-6ad49bdea750 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.904584] env[61356]: DEBUG oslo_vmware.api [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': task-4302652, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065556} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.904818] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1771.904992] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1771.905190] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1771.905363] env[61356]: INFO nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Took 0.60 seconds to destroy the instance on the hypervisor. 
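The entries above show oslo.vmware's asynchronous task pattern end to end: a vCenter task is invoked (SearchDatastore_Task, DeleteDatastoreFile_Task), wait_for_task blocks on it, _poll_task reports "progress is 0%", and the task eventually completes with a duration_secs. A minimal sketch of that poll loop, assuming a hypothetical get_task_info accessor rather than the real oslo.vmware API:

import time

class TaskFailed(Exception):
    """A polled task ended in an error state."""

def wait_for_task(get_task_info, interval=0.5):
    # Poll until the task leaves its transient states, logging progress
    # on each round, then return the final info or raise on error.
    while True:
        info = get_task_info()  # hypothetical accessor, returns a dict
        if info["state"] in ("queued", "running"):
            print("Task %s progress is %d%%" % (info["id"], info.get("progress", 0)))
            time.sleep(interval)
            continue
        if info["state"] == "success":
            return info
        raise TaskFailed(info.get("error", "unknown fault"))

# Simulated run: succeeds on the third poll.
_polls = iter([
    {"id": "task-4302652", "state": "running", "progress": 0},
    {"id": "task-4302652", "state": "running", "progress": 50},
    {"id": "task-4302652", "state": "success", "progress": 100},
])
wait_for_task(lambda: next(_polls), interval=0)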
[ 1771.907471] env[61356]: DEBUG nova.compute.claims [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1771.907618] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.907824] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.931083] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1771.972949] env[61356]: DEBUG nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Refreshing inventories for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1771.983090] env[61356]: DEBUG oslo_vmware.rw_handles [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1772.036998] env[61356]: DEBUG nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Updating ProviderTree inventory for provider cfb833eb-84f8-480d-866a-1ad2fb871484 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1772.037242] env[61356]: DEBUG nova.compute.provider_tree [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1772.041208] env[61356]: DEBUG oslo_vmware.rw_handles [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1772.041394] env[61356]: DEBUG oslo_vmware.rw_handles [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1772.049101] env[61356]: DEBUG nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Refreshing aggregate associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, aggregates: None {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1772.068290] env[61356]: DEBUG nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Refreshing trait associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1772.213207] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744aa0bc-7dc7-4670-821b-ff4775ff78db {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.220673] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1af61ab-218d-4be8-a333-35e3cc3e2495 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.225135] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.249672] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08785dd4-f695-4471-a654-a84129365ca5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.256275] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4587738-4d18-4bfb-9ede-52830826da5c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.269010] env[61356]: DEBUG nova.compute.provider_tree [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.278468] env[61356]: DEBUG nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.294104] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.386s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.294695] env[61356]: ERROR nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1772.294695] env[61356]: Faults: ['InvalidArgument'] [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Traceback (most recent call last): [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self.driver.spawn(context, instance, image_meta, [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self._fetch_image_if_missing(context, vi) [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] image_cache(vi, tmp_image_ds_loc) [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] vm_util.copy_virtual_disk( [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] session._wait_for_task(vmdk_copy_task) [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] return self.wait_for_task(task_ref) [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", 
line 398, in wait_for_task [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] return evt.wait() [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] result = hub.switch() [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] return self.greenlet.switch() [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] self.f(*self.args, **self.kw) [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] raise exceptions.translate_fault(task_info.error) [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Faults: ['InvalidArgument'] [ 1772.294695] env[61356]: ERROR nova.compute.manager [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] [ 1772.295689] env[61356]: DEBUG nova.compute.utils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1772.296784] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Build of instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 was re-scheduled: A specified parameter was not correct: fileType [ 1772.296784] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1772.297194] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1772.297379] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1772.297552] env[61356]: DEBUG nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1772.297715] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1772.626841] env[61356]: DEBUG nova.network.neutron [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.639814] env[61356]: INFO nova.compute.manager [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Took 0.34 seconds to deallocate network for instance. [ 1772.751064] env[61356]: INFO nova.scheduler.client.report [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Deleted allocations for instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 [ 1772.757106] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.758289] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1772.779988] env[61356]: DEBUG oslo_concurrency.lockutils [None req-fc616f54-df6b-4386-8105-8ce3b3db7aa2 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 610.031s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.780734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 413.947s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.780734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 
tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.783747] env[61356]: DEBUG oslo_concurrency.lockutils [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.783747] env[61356]: DEBUG oslo_concurrency.lockutils [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.783942] env[61356]: INFO nova.compute.manager [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Terminating instance [ 1772.787711] env[61356]: DEBUG nova.compute.manager [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1772.787920] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1772.788200] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-330520a4-63e1-4673-bcff-d55ed0f2e905 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.792152] env[61356]: DEBUG nova.compute.manager [None req-2f206047-c256-41e6-8ce5-f1e490e2b6a5 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 344f33b2-9314-478a-b714-003090c9c342] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1772.799965] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a73a815-6c25-4a76-b70a-7181227b933b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.829750] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f7f3e0ba-44c8-43a1-a8fa-36527092d3c4 could not be found. 
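The inventory payload reported above for provider cfb833eb-84f8-480d-866a-1ad2fb871484 determines effective capacity as (total - reserved) * allocation_ratio, which is how Placement sizes allocations against a provider. A quick check against the logged numbers:

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 210, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Effective capacity available for allocations of this resource class.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 210.0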
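The ERROR block above ends not in a hard failure but in a reschedule: the resource claim is aborted, the network deallocated, the allocations deleted, and the build handed back to the scheduler. A condensed, hypothetical sketch of that control flow (the helper names are illustrative, not Nova's):

def build_and_run(instance, spawn, abort_claim, deallocate_network):
    # On a virt-driver fault, release the resource claim and the network
    # allocation, then signal that the build should be rescheduled.
    try:
        spawn(instance)
        return "ACTIVE"
    except Exception as fault:
        abort_claim(instance)          # "Aborting claim" above
        deallocate_network(instance)   # "Deallocating network for instance"
        print("Build of instance %s was re-scheduled: %s" % (instance, fault))
        return "RESCHEDULED"

def failing_spawn(instance):
    raise RuntimeError("A specified parameter was not correct: fileType")

build_and_run("f7f3e0ba-44c8-43a1-a8fa-36527092d3c4", failing_spawn,
              lambda i: None, lambda i: None)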
[ 1772.829948] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1772.830135] env[61356]: INFO nova.compute.manager [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1772.830373] env[61356]: DEBUG oslo.service.loopingcall [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.830845] env[61356]: DEBUG nova.compute.manager [None req-2f206047-c256-41e6-8ce5-f1e490e2b6a5 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 344f33b2-9314-478a-b714-003090c9c342] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1772.831847] env[61356]: DEBUG nova.compute.manager [-] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1772.831952] env[61356]: DEBUG nova.network.neutron [-] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1772.857506] env[61356]: DEBUG nova.network.neutron [-] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.866332] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2f206047-c256-41e6-8ce5-f1e490e2b6a5 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "344f33b2-9314-478a-b714-003090c9c342" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.873s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.868036] env[61356]: INFO nova.compute.manager [-] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] Took 0.04 seconds to deallocate network for instance. [ 1772.875641] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1772.945564] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.945884] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.948066] env[61356]: INFO nova.compute.claims [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1772.982472] env[61356]: DEBUG oslo_concurrency.lockutils [None req-95ad25b5-612f-46e3-8be4-ebc7cc87f3d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.202s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.983343] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 47.951s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.983674] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f7f3e0ba-44c8-43a1-a8fa-36527092d3c4] During sync_power_state the instance has a pending task (deleting). Skip. 
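The waited/held timings logged for "compute_resources" and the per-instance UUID locks come from oslo.concurrency's lockutils wrappers, which record how long each caller queued for the lock and then held it. A minimal sketch of the two forms in use, assuming oslo.concurrency is installed:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources():
    # Callers serialize on the shared in-process lock name; the wrapper
    # logs how long each caller waited for and then held the lock.
    pass

def refresh_cache(instance_uuid):
    # Context-manager form, as used for the refresh_cache-<uuid> locks.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass

claim_resources()
refresh_cache("d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e")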
[ 1772.983730] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "f7f3e0ba-44c8-43a1-a8fa-36527092d3c4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.155965] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404eaad4-1284-4f08-931b-caa248dffa1d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.163875] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd112d8-cc47-478a-baf1-3ccefecf1a7b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.193836] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faef5254-539c-4285-b0e1-9ab0769f7177 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.201181] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3600e10a-c9aa-4751-babc-787c7653c252 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.213892] env[61356]: DEBUG nova.compute.provider_tree [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.223436] env[61356]: DEBUG nova.scheduler.client.report [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1773.237589] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.238103] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1773.280222] env[61356]: DEBUG nova.compute.utils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1773.281447] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1773.281610] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1773.294053] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1773.340945] env[61356]: DEBUG nova.policy [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02fb0b02ccf0433e9f740f4b7177ab34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f6c093c00e44197ba6affbc1c4964bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1773.361272] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1773.386057] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1773.386277] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1773.386439] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1773.386611] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1773.386760] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1773.386904] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1773.387119] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1773.387281] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1773.387448] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 
tempest-ServersTestJSON-1488391771-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1773.387612] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1773.387780] env[61356]: DEBUG nova.virt.hardware [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1773.388694] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e1e524-7bdc-448e-b211-d3bfcd61ee1d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.396269] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a106d4c7-f263-4232-acba-d89e56d59c52 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.495015] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.495015] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.757418] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.840880] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Successfully created port: d524e42e-8122-4735-bf7a-a7e478740707 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1774.476595] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Successfully updated port: d524e42e-8122-4735-bf7a-a7e478740707 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1774.488694] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 
tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "refresh_cache-d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.488694] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired lock "refresh_cache-d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.488694] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1774.531951] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1774.692809] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Updating instance_info_cache with network_info: [{"id": "d524e42e-8122-4735-bf7a-a7e478740707", "address": "fa:16:3e:df:b6:bf", "network": {"id": "cf4b8e3d-edbd-4eff-a69d-181d9b32fa8e", "bridge": "br-int", "label": "tempest-ServersTestJSON-832994017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f6c093c00e44197ba6affbc1c4964bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd524e42e-81", "ovs_interfaceid": "d524e42e-8122-4735-bf7a-a7e478740707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.705584] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Releasing lock "refresh_cache-d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.705891] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance network_info: |[{"id": "d524e42e-8122-4735-bf7a-a7e478740707", "address": 
"fa:16:3e:df:b6:bf", "network": {"id": "cf4b8e3d-edbd-4eff-a69d-181d9b32fa8e", "bridge": "br-int", "label": "tempest-ServersTestJSON-832994017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f6c093c00e44197ba6affbc1c4964bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd524e42e-81", "ovs_interfaceid": "d524e42e-8122-4735-bf7a-a7e478740707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1774.706310] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:b6:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02092ea4-bae0-4e42-b0ab-abc365b4395a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd524e42e-8122-4735-bf7a-a7e478740707', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1774.713789] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating folder: Project (2f6c093c00e44197ba6affbc1c4964bf). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1774.714324] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10e338f6-abe1-46b0-9e22-ba870c3d2ba3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.725457] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Created folder: Project (2f6c093c00e44197ba6affbc1c4964bf) in parent group-v846926. [ 1774.725654] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating folder: Instances. Parent ref: group-v847033. 
{{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1774.725866] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c5e03de-1152-4187-b94b-3b0ef8c3d255 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.734335] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Created folder: Instances in parent group-v847033. [ 1774.734576] env[61356]: DEBUG oslo.service.loopingcall [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.734752] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1774.734940] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38dd66f3-7495-467e-8d8d-cc0aa47f6638 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.753188] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1774.753188] env[61356]: value = "task-4302655" [ 1774.753188] env[61356]: _type = "Task" [ 1774.753188] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.760094] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.760277] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302655, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.782466] env[61356]: DEBUG nova.compute.manager [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Received event network-vif-plugged-d524e42e-8122-4735-bf7a-a7e478740707 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1774.782625] env[61356]: DEBUG oslo_concurrency.lockutils [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] Acquiring lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.782832] env[61356]: DEBUG oslo_concurrency.lockutils [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.783025] env[61356]: DEBUG oslo_concurrency.lockutils [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.783204] env[61356]: DEBUG nova.compute.manager [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] No waiting events found dispatching network-vif-plugged-d524e42e-8122-4735-bf7a-a7e478740707 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1774.783383] env[61356]: WARNING nova.compute.manager [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Received unexpected event network-vif-plugged-d524e42e-8122-4735-bf7a-a7e478740707 for instance with vm_state building and task_state spawning. [ 1774.783547] env[61356]: DEBUG nova.compute.manager [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Received event network-changed-d524e42e-8122-4735-bf7a-a7e478740707 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1774.783718] env[61356]: DEBUG nova.compute.manager [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Refreshing instance network info cache due to event network-changed-d524e42e-8122-4735-bf7a-a7e478740707. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1774.783908] env[61356]: DEBUG oslo_concurrency.lockutils [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] Acquiring lock "refresh_cache-d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.784101] env[61356]: DEBUG oslo_concurrency.lockutils [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] Acquired lock "refresh_cache-d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.784292] env[61356]: DEBUG nova.network.neutron [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Refreshing network info cache for port d524e42e-8122-4735-bf7a-a7e478740707 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1775.053032] env[61356]: DEBUG nova.network.neutron [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Updated VIF entry in instance network info cache for port d524e42e-8122-4735-bf7a-a7e478740707. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1775.053032] env[61356]: DEBUG nova.network.neutron [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Updating instance_info_cache with network_info: [{"id": "d524e42e-8122-4735-bf7a-a7e478740707", "address": "fa:16:3e:df:b6:bf", "network": {"id": "cf4b8e3d-edbd-4eff-a69d-181d9b32fa8e", "bridge": "br-int", "label": "tempest-ServersTestJSON-832994017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f6c093c00e44197ba6affbc1c4964bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd524e42e-81", "ovs_interfaceid": "d524e42e-8122-4735-bf7a-a7e478740707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.064947] env[61356]: DEBUG oslo_concurrency.lockutils [req-e87863c9-79c1-4584-8996-211a19bf4fdf req-644f61f0-5439-4542-901e-d8cbb61ccefc service nova] Releasing lock "refresh_cache-d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.262623] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302655, 'name': CreateVM_Task, 'duration_secs': 0.280379} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.264051] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1775.264051] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.264051] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.264236] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.264408] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf327e9-5efd-4ebd-b81c-ca3884e76c72 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.268575] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for the task: (returnval){ [ 1775.268575] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52df0581-b464-17a0-cc25-0116e901ef98" [ 1775.268575] env[61356]: _type = "Task" [ 1775.268575] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.275960] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52df0581-b464-17a0-cc25-0116e901ef98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.757676] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.758106] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.758106] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1775.778820] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.779072] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.779288] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.758748] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.760692] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1779.760692] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1779.783753] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.783912] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784049] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784181] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784304] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784422] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784572] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784697] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784815] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.784931] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1779.785060] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1785.293256] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.039562] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.039990] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.665357] env[61356]: WARNING oslo_vmware.rw_handles [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1818.665357] env[61356]: ERROR oslo_vmware.rw_handles [ 1818.665924] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1818.668409] env[61356]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1818.668692] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Copying Virtual Disk [datastore2] vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/afecdfbd-faae-4afb-ac54-30a5b512cff5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1818.669040] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca44518b-6948-480b-9049-bde58eb6ad3c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.676598] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Waiting for the task: (returnval){ [ 1818.676598] env[61356]: value = "task-4302656" [ 1818.676598] env[61356]: _type = "Task" [ 1818.676598] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.684059] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Task: {'id': task-4302656, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.187098] env[61356]: DEBUG oslo_vmware.exceptions [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1819.188061] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.188262] env[61356]: ERROR nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1819.188262] env[61356]: Faults: ['InvalidArgument'] [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Traceback (most recent call last): [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] yield resources [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self.driver.spawn(context, instance, image_meta, [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self._fetch_image_if_missing(context, vi) [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] image_cache(vi, tmp_image_ds_loc) [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] vm_util.copy_virtual_disk( [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] session._wait_for_task(vmdk_copy_task) [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] return self.wait_for_task(task_ref) [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] return evt.wait() [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] result = hub.switch() [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] return self.greenlet.switch() [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self.f(*self.args, **self.kw) [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] raise exceptions.translate_fault(task_info.error) [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Faults: ['InvalidArgument'] [ 1819.188262] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] [ 1819.189395] env[61356]: INFO nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Terminating instance [ 1819.190271] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.190478] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1819.190714] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d699b602-112e-4b22-92ce-ded0f7602de4 {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.193907] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1819.194122] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1819.194832] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570448ac-1957-4d3d-8572-6e7473dddf57 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.201820] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1819.202051] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8804af0-60b8-4ca4-9f80-709da0924dcb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.204184] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1819.204359] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1819.205317] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-243c6f15-ee30-4a0c-b046-40c2b36f973b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.210214] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Waiting for the task: (returnval){ [ 1819.210214] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]526ab47d-12fd-252f-14c5-2924e0355acb" [ 1819.210214] env[61356]: _type = "Task" [ 1819.210214] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.217061] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]526ab47d-12fd-252f-14c5-2924e0355acb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.280739] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1819.281011] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1819.281238] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Deleting the datastore file [datastore2] f8e4391b-3224-42a7-9962-eaaf2230d70d {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1819.281500] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97da34a8-bca9-4f43-beca-af83d8001457 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.288365] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Waiting for the task: (returnval){ [ 1819.288365] env[61356]: value = "task-4302658" [ 1819.288365] env[61356]: _type = "Task" [ 1819.288365] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.296159] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Task: {'id': task-4302658, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.720989] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1819.721306] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Creating directory with path [datastore2] vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1819.721519] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-073c6b5e-c428-4f1f-87f7-f133e5612375 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.733149] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Created directory with path [datastore2] vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1819.733352] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Fetch image to [datastore2] vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1819.733525] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1819.734261] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916c218d-909a-4473-a6f2-d8d7c0e2e5dc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.740723] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18c7c19-3ba1-43a7-9bf0-10d97c8f2c50 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.749449] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2bfdd7-bfbc-45e9-a64e-460ccec36653 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.780723] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c4b0f5-1dc9-4427-b4a3-f2825b2998cd {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.786447] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f084293c-f521-4309-a77c-ddb25fe1466c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.796749] env[61356]: DEBUG oslo_vmware.api [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Task: {'id': task-4302658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068287} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.797037] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1819.797261] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1819.797436] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1819.797610] env[61356]: INFO nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1819.799789] env[61356]: DEBUG nova.compute.claims [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1819.799957] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.800331] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.811759] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1819.943014] env[61356]: DEBUG oslo_vmware.rw_handles [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1820.002514] env[61356]: DEBUG oslo_vmware.rw_handles [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1820.002728] env[61356]: DEBUG oslo_vmware.rw_handles [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1820.074926] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ec03c6-7c77-4a27-abf0-3a0569c81efc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.082957] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba497cf-7e62-4bab-b5f5-5c641e729610 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.113417] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb6bba4-48b8-4a90-a413-ffe9ef68af9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.121122] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c99ed4-d3d3-4d9e-a8b2-9176351e26b1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.135386] env[61356]: DEBUG nova.compute.provider_tree [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1820.145467] env[61356]: DEBUG nova.scheduler.client.report [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1820.159597] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.359s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.160120] env[61356]: ERROR nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1820.160120] env[61356]: Faults: ['InvalidArgument'] [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Traceback (most recent call last): [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1820.160120] 
env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self.driver.spawn(context, instance, image_meta, [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self._fetch_image_if_missing(context, vi) [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] image_cache(vi, tmp_image_ds_loc) [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] vm_util.copy_virtual_disk( [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] session._wait_for_task(vmdk_copy_task) [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] return self.wait_for_task(task_ref) [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] return evt.wait() [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] result = hub.switch() [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] return self.greenlet.switch() [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] self.f(*self.args, **self.kw) [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] raise exceptions.translate_fault(task_info.error) [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Faults: ['InvalidArgument'] [ 1820.160120] env[61356]: ERROR nova.compute.manager [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] [ 1820.161019] env[61356]: DEBUG nova.compute.utils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1820.162248] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Build of instance f8e4391b-3224-42a7-9962-eaaf2230d70d was re-scheduled: A specified parameter was not correct: fileType [ 1820.162248] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1820.162662] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1820.162874] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1820.163110] env[61356]: DEBUG nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1820.163295] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1820.567146] env[61356]: DEBUG nova.network.neutron [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.578759] env[61356]: INFO nova.compute.manager [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Took 0.41 seconds to deallocate network for instance. [ 1820.676728] env[61356]: INFO nova.scheduler.client.report [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Deleted allocations for instance f8e4391b-3224-42a7-9962-eaaf2230d70d [ 1820.699831] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b5a1aeed-ff37-44a2-9258-6f6d9a7dcaed tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.432s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.700922] env[61356]: DEBUG oslo_concurrency.lockutils [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 430.676s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.701155] env[61356]: DEBUG oslo_concurrency.lockutils [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Acquiring lock "f8e4391b-3224-42a7-9962-eaaf2230d70d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.701357] env[61356]: DEBUG oslo_concurrency.lockutils [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.701519] env[61356]: DEBUG oslo_concurrency.lockutils [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.703656] env[61356]: INFO nova.compute.manager [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Terminating instance [ 1820.705341] env[61356]: DEBUG nova.compute.manager [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1820.705541] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1820.706077] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f5a74cb-559e-4d35-8d9f-2e928596c4ee {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.712133] env[61356]: DEBUG nova.compute.manager [None req-c9b44311-913a-4439-8fd2-4bcc9a42b118 tempest-ServersTestFqdnHostnames-981284514 tempest-ServersTestFqdnHostnames-981284514-project-member] [instance: ba07e43d-c654-4202-a392-743006b3e6fd] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1820.718910] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69205443-9278-43c6-bfad-7b98450a33cd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.735831] env[61356]: DEBUG nova.compute.manager [None req-c9b44311-913a-4439-8fd2-4bcc9a42b118 tempest-ServersTestFqdnHostnames-981284514 tempest-ServersTestFqdnHostnames-981284514-project-member] [instance: ba07e43d-c654-4202-a392-743006b3e6fd] Instance disappeared before build. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1820.748638] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f8e4391b-3224-42a7-9962-eaaf2230d70d could not be found. 
[ 1820.748855] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1820.749047] env[61356]: INFO nova.compute.manager [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1820.749294] env[61356]: DEBUG oslo.service.loopingcall [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1820.749513] env[61356]: DEBUG nova.compute.manager [-] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1820.749608] env[61356]: DEBUG nova.network.neutron [-] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1820.762021] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c9b44311-913a-4439-8fd2-4bcc9a42b118 tempest-ServersTestFqdnHostnames-981284514 tempest-ServersTestFqdnHostnames-981284514-project-member] Lock "ba07e43d-c654-4202-a392-743006b3e6fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.391s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.778492] env[61356]: DEBUG nova.network.neutron [-] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.788284] env[61356]: INFO nova.compute.manager [-] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] Took 0.04 seconds to deallocate network for instance. [ 1820.790287] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1820.844326] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.844570] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.845976] env[61356]: INFO nova.compute.claims [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.885125] env[61356]: DEBUG oslo_concurrency.lockutils [None req-30cbc118-30d9-4288-acde-2b0a7c6704ab tempest-ServerAddressesTestJSON-1260576647 tempest-ServerAddressesTestJSON-1260576647-project-member] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.886253] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 95.854s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.886441] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f8e4391b-3224-42a7-9962-eaaf2230d70d] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1820.886613] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "f8e4391b-3224-42a7-9962-eaaf2230d70d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.031052] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04bfbb1-1833-40df-8db1-d053736a7d75 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.038167] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb442dc-00c4-4530-921a-8014db0a42f4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.067769] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c30a50e-e15e-4964-9a6a-8fb83f6ed62e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.074716] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7906c662-1e8a-4fb4-b3c0-45197f425adb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.087070] env[61356]: DEBUG nova.compute.provider_tree [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1821.096629] env[61356]: DEBUG nova.scheduler.client.report [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1821.131553] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.131846] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Start building networks asynchronously for instance.
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1821.170022] env[61356]: DEBUG nova.compute.utils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1821.171344] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1821.171506] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1821.182209] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1821.238492] env[61356]: DEBUG nova.policy [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89c2ea569ffb416eb170549aaebb704a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fe9dfd77dff47cab342d5ebb764e770', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1821.255871] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1821.281821] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1821.282072] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1821.282235] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1821.282417] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1821.282563] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1821.282706] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1821.282923] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1821.283100] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1821.283268] env[61356]: DEBUG 
nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1821.283428] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1821.283596] env[61356]: DEBUG nova.virt.hardware [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1821.284460] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769f841e-e041-4af0-911f-982a2ee23b87 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.292812] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f673028-1249-44de-81ac-13f06ffa82e0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.553974] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Successfully created port: d9f04af6-9989-4ea2-9b91-22c8cefbb26c {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1822.148280] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Successfully updated port: d9f04af6-9989-4ea2-9b91-22c8cefbb26c {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1822.158631] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "refresh_cache-13efd018-f491-4979-87b8-0b4af68aaf1b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.158797] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "refresh_cache-13efd018-f491-4979-87b8-0b4af68aaf1b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.158922] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1822.201255] env[61356]: DEBUG nova.network.neutron [None 
req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1822.376208] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Updating instance_info_cache with network_info: [{"id": "d9f04af6-9989-4ea2-9b91-22c8cefbb26c", "address": "fa:16:3e:0a:17:98", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9f04af6-99", "ovs_interfaceid": "d9f04af6-9989-4ea2-9b91-22c8cefbb26c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.388275] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "refresh_cache-13efd018-f491-4979-87b8-0b4af68aaf1b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.388556] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance network_info: |[{"id": "d9f04af6-9989-4ea2-9b91-22c8cefbb26c", "address": "fa:16:3e:0a:17:98", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9f04af6-99", "ovs_interfaceid": 
"d9f04af6-9989-4ea2-9b91-22c8cefbb26c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1822.388965] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:17:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae4e3171-21cd-4094-b6cf-81bf366c75bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9f04af6-9989-4ea2-9b91-22c8cefbb26c', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1822.396401] env[61356]: DEBUG oslo.service.loopingcall [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.396851] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1822.397088] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e282b63-0bd9-4fb0-b7ef-330be9457086 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.416535] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1822.416535] env[61356]: value = "task-4302659" [ 1822.416535] env[61356]: _type = "Task" [ 1822.416535] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.424072] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302659, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.607568] env[61356]: DEBUG nova.compute.manager [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Received event network-vif-plugged-d9f04af6-9989-4ea2-9b91-22c8cefbb26c {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1822.607808] env[61356]: DEBUG oslo_concurrency.lockutils [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] Acquiring lock "13efd018-f491-4979-87b8-0b4af68aaf1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.608035] env[61356]: DEBUG oslo_concurrency.lockutils [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.608272] env[61356]: DEBUG oslo_concurrency.lockutils [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.608454] env[61356]: DEBUG nova.compute.manager [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] No waiting events found dispatching network-vif-plugged-d9f04af6-9989-4ea2-9b91-22c8cefbb26c {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1822.608620] env[61356]: WARNING nova.compute.manager [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Received unexpected event network-vif-plugged-d9f04af6-9989-4ea2-9b91-22c8cefbb26c for instance with vm_state building and task_state spawning. [ 1822.608781] env[61356]: DEBUG nova.compute.manager [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Received event network-changed-d9f04af6-9989-4ea2-9b91-22c8cefbb26c {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1822.608934] env[61356]: DEBUG nova.compute.manager [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Refreshing instance network info cache due to event network-changed-d9f04af6-9989-4ea2-9b91-22c8cefbb26c.
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1822.609141] env[61356]: DEBUG oslo_concurrency.lockutils [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] Acquiring lock "refresh_cache-13efd018-f491-4979-87b8-0b4af68aaf1b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.609282] env[61356]: DEBUG oslo_concurrency.lockutils [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] Acquired lock "refresh_cache-13efd018-f491-4979-87b8-0b4af68aaf1b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.609442] env[61356]: DEBUG nova.network.neutron [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Refreshing network info cache for port d9f04af6-9989-4ea2-9b91-22c8cefbb26c {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1822.926667] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302659, 'name': CreateVM_Task, 'duration_secs': 0.306942} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.926839] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1822.939343] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.939524] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.939847] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.940121] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98497952-94ab-4607-8beb-6969794f8d9b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.944966] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 1822.944966] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52721257-a09f-af5f-8edc-99d365f32900" [ 1822.944966] env[61356]: _type = "Task" [ 1822.944966] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.953159] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52721257-a09f-af5f-8edc-99d365f32900, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.084054] env[61356]: DEBUG nova.network.neutron [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Updated VIF entry in instance network info cache for port d9f04af6-9989-4ea2-9b91-22c8cefbb26c. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1823.084427] env[61356]: DEBUG nova.network.neutron [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Updating instance_info_cache with network_info: [{"id": "d9f04af6-9989-4ea2-9b91-22c8cefbb26c", "address": "fa:16:3e:0a:17:98", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9f04af6-99", "ovs_interfaceid": "d9f04af6-9989-4ea2-9b91-22c8cefbb26c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.094434] env[61356]: DEBUG oslo_concurrency.lockutils [req-4b683b5d-5ba1-4b47-8e68-f84d254a6f55 req-f4f657de-0fe3-41b4-8f20-82e74e1422dd service nova] Releasing lock "refresh_cache-13efd018-f491-4979-87b8-0b4af68aaf1b" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.455174] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.455470] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1823.455642] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.758416] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.769854] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.770098] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.770297] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.770458] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1826.771607] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89f85ab-d164-4a80-87af-ff10f04adaa4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.781450] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6beed15e-cd06-47c2-beff-8645fa78a0f7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.795055] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d114a3b-7a09-4365-aa16-1ae9c1eeaa6d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.801257] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51f0751-bbc3-4662-bc69-d934efea9e8f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.829346] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180683MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1826.829484] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.829665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.900739] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.900899] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901036] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901166] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901286] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901402] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901516] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901628] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901739] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.901849] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1826.912658] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1826.923630] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1826.923828] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1826.924170] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1827.057497] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c93a241-4baa-46e2-809d-0c8178962d9b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.065998] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8869ec-9f36-4699-9acd-d25aa84898a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.095522] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d274c137-7793-4e16-9096-c615a4368253 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.102832] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3572895-75db-4e76-9ae9-607aa6555d9d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.115437] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1827.123928] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1827.138394] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1827.138603] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.309s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.138262] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.753690] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.757367] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.757551] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.758668] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.758468] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.759055] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1837.759642] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1840.757969] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1840.758360] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1840.758360] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1840.780865] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781020] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781160] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781289] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781413] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781535] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781654] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781774] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.781893] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.782022] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1840.782151] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1842.778651] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1868.094598] env[61356]: WARNING oslo_vmware.rw_handles [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1868.094598] env[61356]: ERROR oslo_vmware.rw_handles [ 1868.095459] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1868.097146] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1868.097396] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Copying Virtual Disk [datastore2] vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/d8731569-3180-412c-b9a4-089c37d4019e/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1868.097682] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5568aa4d-c6f1-41d7-a36f-b2af74f88e64 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.105854] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Waiting for the task: (returnval){ [ 1868.105854] env[61356]: value = "task-4302660" [ 1868.105854] env[61356]: _type = "Task" [ 1868.105854] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.113621] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Task: {'id': task-4302660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.615790] env[61356]: DEBUG oslo_vmware.exceptions [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1868.616149] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.616736] env[61356]: ERROR nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.616736] env[61356]: Faults: ['InvalidArgument'] [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Traceback (most recent call last): [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] yield resources [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self.driver.spawn(context, instance, image_meta, [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self._fetch_image_if_missing(context, vi) [ 1868.616736] 
env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] image_cache(vi, tmp_image_ds_loc) [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] vm_util.copy_virtual_disk( [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] session._wait_for_task(vmdk_copy_task) [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] return self.wait_for_task(task_ref) [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] return evt.wait() [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] result = hub.switch() [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] return self.greenlet.switch() [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self.f(*self.args, **self.kw) [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] raise exceptions.translate_fault(task_info.error) [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Faults: ['InvalidArgument'] [ 1868.616736] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] [ 1868.617746] env[61356]: INFO nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 
tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Terminating instance [ 1868.618801] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.619086] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1868.619247] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5143abed-8aa2-49ec-a0e7-ce6c98a1098f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.621302] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1868.621495] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1868.622196] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75417e9e-b1b2-4f5c-9593-216ae47afaf1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.629635] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1868.629867] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf8b42bd-715f-488d-b99a-adbc1042cf7b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.631878] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1868.632016] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1868.632926] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5bf73a1-58d4-4d35-a96c-c3b422f1111c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.637233] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1868.637233] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52cfca78-7230-5fe3-8c29-afce87e5699d" [ 1868.637233] env[61356]: _type = "Task" [ 1868.637233] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.644074] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52cfca78-7230-5fe3-8c29-afce87e5699d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.698465] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1868.698686] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1868.698862] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Deleting the datastore file [datastore2] 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1868.699119] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b46af32f-b23b-47c7-b6a1-078acc923aca {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.704704] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Waiting for the task: (returnval){ [ 1868.704704] env[61356]: value = "task-4302662" [ 1868.704704] env[61356]: _type = "Task" [ 1868.704704] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.711799] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Task: {'id': task-4302662, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.147542] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1869.147921] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating directory with path [datastore2] vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.147968] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f34f24a8-5e47-4513-8225-b8b95ccbda5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.158761] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Created directory with path [datastore2] vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.158989] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Fetch image to [datastore2] vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1869.159219] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1869.159933] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4fd6fc-77ee-4a35-b80e-e46c03049108 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.165997] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160bfad4-4d75-408b-bf48-1493e37d094d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.174599] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6391f5-48f8-41bc-8351-b88e1b13d660 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.203800] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89095fb5-d050-49e3-a1f2-e48998fe6eff {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.213782] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7c7039af-ae48-4136-8e76-e406b68034c0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.215426] env[61356]: DEBUG oslo_vmware.api [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Task: {'id': task-4302662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090602} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.215632] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1869.215829] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1869.216023] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1869.216205] env[61356]: INFO nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Took 0.59 seconds to destroy the instance on the hypervisor. 
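The CopyVirtualDisk_Task and DeleteDatastoreFile_Task records above all follow the same wait_for_task()/_poll_task() cycle from oslo_vmware/api.py that the spawn traceback walks through: the task is polled on a timer (the "progress is 0%" lines), and on completion either the duration_secs is logged or the server-side fault is translated into a VimFaultException, which is how the InvalidArgument/fileType error surfaced. A minimal sketch of that polling pattern follows; the vim handle and its get_task_info() call are illustrative stand-ins, not the real oslo.vmware API, which reads TaskInfo via the property collector.

    import time

    def wait_for_task(vim, task_ref, poll_interval=0.5):
        # Poll the vCenter task until it leaves the queued/running states,
        # loosely mirroring oslo_vmware.api wait_for_task()/_poll_task().
        while True:
            info = vim.get_task_info(task_ref)  # assumption: helper returning a TaskInfo-like object
            if info.state in ('queued', 'running'):
                print(f"Task: {{'id': {info.key!r}, 'name': {info.name!r}}} progress is {info.progress or 0}%")
            elif info.state == 'success':
                return info.result  # e.g. the new disk backing for a CopyVirtualDisk_Task
            else:
                # 'error' state: corresponds to raise exceptions.translate_fault(task_info.error)
                # in the traceback above, producing e.g. Faults: ['InvalidArgument'].
                raise RuntimeError(f"{info.error.localizedMessage} Faults: {info.error.faults}")
            time.sleep(poll_interval)

In the real session the loop runs inside an oslo.service looping call on a green thread, which is why the traceback passes through eventlet's hub.switch() before reaching _poll_task.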
[ 1869.218209] env[61356]: DEBUG nova.compute.claims [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1869.218378] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.218603] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.238032] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1869.292579] env[61356]: DEBUG oslo_vmware.rw_handles [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1869.350186] env[61356]: DEBUG oslo_vmware.rw_handles [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1869.350391] env[61356]: DEBUG oslo_vmware.rw_handles [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1869.468059] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d071ee-a7fa-48d7-bc8a-397deffc62e2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.476721] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf98531-ac04-41a3-8bbe-bd8628923fe3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.506065] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fb74dc-dc19-4290-8b44-03b72dd2a080 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.512824] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebc227d-29de-4d48-b115-9a0b9d9a72f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.525416] env[61356]: DEBUG nova.compute.provider_tree [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1869.533501] env[61356]: DEBUG nova.scheduler.client.report [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1869.547269] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.329s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.547799] env[61356]: ERROR nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1869.547799] env[61356]: Faults: ['InvalidArgument'] [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Traceback (most recent call last): [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1869.547799] env[61356]: ERROR nova.compute.manager 
[instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self.driver.spawn(context, instance, image_meta, [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self._fetch_image_if_missing(context, vi) [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] image_cache(vi, tmp_image_ds_loc) [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] vm_util.copy_virtual_disk( [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] session._wait_for_task(vmdk_copy_task) [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] return self.wait_for_task(task_ref) [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] return evt.wait() [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] result = hub.switch() [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] return self.greenlet.switch() [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] self.f(*self.args, **self.kw) [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] raise exceptions.translate_fault(task_info.error) [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Faults: ['InvalidArgument'] [ 1869.547799] env[61356]: ERROR nova.compute.manager [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] [ 1869.548789] env[61356]: DEBUG nova.compute.utils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1869.549929] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Build of instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 was re-scheduled: A specified parameter was not correct: fileType [ 1869.549929] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1869.550325] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1869.550497] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1869.550673] env[61356]: DEBUG nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1869.550837] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1869.928460] env[61356]: DEBUG nova.network.neutron [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.940270] env[61356]: INFO nova.compute.manager [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Took 0.39 seconds to deallocate network for instance. [ 1870.052222] env[61356]: INFO nova.scheduler.client.report [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Deleted allocations for instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 [ 1870.089386] env[61356]: DEBUG oslo_concurrency.lockutils [None req-82b0b43b-6010-4995-8356-2ec59de99ff9 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 618.373s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.091200] env[61356]: DEBUG oslo_concurrency.lockutils [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 422.585s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.091429] env[61356]: DEBUG oslo_concurrency.lockutils [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Acquiring lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.091642] env[61356]: DEBUG oslo_concurrency.lockutils [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.091809] env[61356]: DEBUG oslo_concurrency.lockutils [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.093881] env[61356]: INFO nova.compute.manager [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Terminating instance [ 1870.097773] env[61356]: DEBUG nova.compute.manager [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1870.098036] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1870.098233] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae6f12d7-5bf2-4797-889e-8c3087d4894c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.108359] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8eadb3d-d12b-43c7-bccb-174cc837db6e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.122996] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1870.144208] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500 could not be found. [ 1870.144425] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1870.144607] env[61356]: INFO nova.compute.manager [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Took 0.05 seconds to destroy the instance on the hypervisor. 
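Every "Acquiring"/"acquired"/"released" triple in this log (the per-instance "-events" lock and the "compute_resources" lock above, each with its waited/held times) is emitted by the inner() wrapper of oslo_concurrency's synchronized() decorator, which times both how long the caller waited for the lock and how long it held it. A simplified re-creation of that bookkeeping, using a plain threading.Lock; the real lockutils.py also supports fair and external file-based locks.

    import threading
    import time

    _locks = {}

    def synchronized(name):
        lock = _locks.setdefault(name, threading.Lock())

        def decorator(fn):
            def inner(*args, **kwargs):
                t0 = time.monotonic()
                with lock:
                    waited = time.monotonic() - t0
                    print(f'Lock "{name}" acquired by "{fn.__qualname__}" :: waited {waited:.3f}s')
                    t1 = time.monotonic()
                    try:
                        return fn(*args, **kwargs)
                    finally:
                        held = time.monotonic() - t1
                        print(f'Lock "{name}" "released" by "{fn.__qualname__}" :: held {held:.3f}s')
            return inner
        return decorator

    @synchronized("compute_resources")
    def abort_instance_claim():
        ...  # held for 0.329s in the record at [ 1869.547269] above

The long waits visible here (e.g. 618.373s held on the instance lock, 422.585s waited by the terminate path) are normal for this wrapper: the build and the delete of instance 3edbcad5 serialize on the same instance UUID lock.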
[ 1870.144849] env[61356]: DEBUG oslo.service.loopingcall [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1870.145168] env[61356]: DEBUG nova.compute.manager [-] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1870.145206] env[61356]: DEBUG nova.network.neutron [-] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1870.181307] env[61356]: DEBUG nova.network.neutron [-] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.189283] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.189520] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.190968] env[61356]: INFO nova.compute.claims [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1870.193832] env[61356]: INFO nova.compute.manager [-] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] Took 0.05 seconds to deallocate network for instance. [ 1870.283013] env[61356]: DEBUG oslo_concurrency.lockutils [None req-717fd00b-d0cf-456f-86eb-08c83890ab56 tempest-ServerTagsTestJSON-1960439324 tempest-ServerTagsTestJSON-1960439324-project-member] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.284063] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 145.251s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.284264] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 3edbcad5-1bf9-4ef3-9902-c2abfb9e0500] During sync_power_state the instance has a pending task (deleting). Skip. 
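The claim at [ 1870.190968] succeeds because the requested resources fit the provider inventory that the report client dumps just below (and dumped identically at [ 1869.533501]). A back-of-the-envelope check, assuming placement's usual capacity rule capacity = (total - reserved) * allocation_ratio; the numbers are copied from the logged inventory dict and the m1.nano flavor shown further down (1 vCPU, 128 MB RAM, 1 GB root disk).

    # Rough sketch: deriving schedulable capacity from the inventory logged by
    # nova.scheduler.client.report. The formula is an assumption about how
    # placement computes capacity, not code from this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 210,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 210 -- the m1.nano request
    # (1 VCPU / 128 MB / 1 GB) fits easily, matching "Claim successful" above.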
[ 1870.284502] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "3edbcad5-1bf9-4ef3-9902-c2abfb9e0500" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.366725] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81c5580-b536-4093-8e2b-90b91c54a1ab {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.374319] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc1ea22-e7e5-4988-b327-65919ea2e332 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.403135] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fd4b32-7212-4254-b78f-ecfeffdca36a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.409535] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60a1a56-d84c-47b4-8797-bcc82797ec15 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.422848] env[61356]: DEBUG nova.compute.provider_tree [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.434217] env[61356]: DEBUG nova.scheduler.client.report [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.448439] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.448942] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Start building networks asynchronously for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1870.477961] env[61356]: DEBUG nova.compute.utils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1870.479293] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1870.479467] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1870.487530] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1870.544583] env[61356]: DEBUG nova.policy [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '877bb82afe8d42ae8f54d1e963c0244b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a8faf03438643ad85e91c93c88ce86d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1870.550408] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1870.578904] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1870.578904] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1870.579152] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1870.579261] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1870.579410] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1870.579560] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1870.579767] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1870.579929] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1870.580137] env[61356]: DEBUG nova.virt.hardware [None 
req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1870.580317] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1870.580491] env[61356]: DEBUG nova.virt.hardware [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1870.581428] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a16208-da16-4a16-b3a2-3a172c0d3b96 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.590305] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006563ad-370e-4604-bc95-2c9f9182da28 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.835926] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Successfully created port: 27068a83-d901-4b13-9c13-3780218be311 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1871.500072] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Successfully updated port: 27068a83-d901-4b13-9c13-3780218be311 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1871.514083] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "refresh_cache-9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.514243] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "refresh_cache-9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.514381] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1871.553813] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 
tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1871.713160] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Updating instance_info_cache with network_info: [{"id": "27068a83-d901-4b13-9c13-3780218be311", "address": "fa:16:3e:47:1d:85", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27068a83-d9", "ovs_interfaceid": "27068a83-d901-4b13-9c13-3780218be311", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.724223] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "refresh_cache-9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.724515] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance network_info: |[{"id": "27068a83-d901-4b13-9c13-3780218be311", "address": "fa:16:3e:47:1d:85", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27068a83-d9", "ovs_interfaceid": "27068a83-d901-4b13-9c13-3780218be311", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1871.724926] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:1d:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27068a83-d901-4b13-9c13-3780218be311', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1871.732686] env[61356]: DEBUG oslo.service.loopingcall [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.733202] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1871.733455] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c8475b7-66ee-4807-884e-872842f6f5f5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.754161] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1871.754161] env[61356]: value = "task-4302663" [ 1871.754161] env[61356]: _type = "Task" [ 1871.754161] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.761905] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.964989] env[61356]: DEBUG nova.compute.manager [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Received event network-vif-plugged-27068a83-d901-4b13-9c13-3780218be311 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1871.965420] env[61356]: DEBUG oslo_concurrency.lockutils [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] Acquiring lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.965420] env[61356]: DEBUG oslo_concurrency.lockutils [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.965622] env[61356]: DEBUG oslo_concurrency.lockutils [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.965677] env[61356]: DEBUG nova.compute.manager [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] No waiting events found dispatching network-vif-plugged-27068a83-d901-4b13-9c13-3780218be311 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1871.965830] env[61356]: WARNING nova.compute.manager [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Received unexpected event network-vif-plugged-27068a83-d901-4b13-9c13-3780218be311 for instance with vm_state building and task_state spawning. [ 1871.965992] env[61356]: DEBUG nova.compute.manager [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Received event network-changed-27068a83-d901-4b13-9c13-3780218be311 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1871.966161] env[61356]: DEBUG nova.compute.manager [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Refreshing instance network info cache due to event network-changed-27068a83-d901-4b13-9c13-3780218be311. 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1871.966344] env[61356]: DEBUG oslo_concurrency.lockutils [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] Acquiring lock "refresh_cache-9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.966525] env[61356]: DEBUG oslo_concurrency.lockutils [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] Acquired lock "refresh_cache-9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.966728] env[61356]: DEBUG nova.network.neutron [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Refreshing network info cache for port 27068a83-d901-4b13-9c13-3780218be311 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1872.215251] env[61356]: DEBUG nova.network.neutron [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Updated VIF entry in instance network info cache for port 27068a83-d901-4b13-9c13-3780218be311. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1872.215657] env[61356]: DEBUG nova.network.neutron [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Updating instance_info_cache with network_info: [{"id": "27068a83-d901-4b13-9c13-3780218be311", "address": "fa:16:3e:47:1d:85", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27068a83-d9", "ovs_interfaceid": "27068a83-d901-4b13-9c13-3780218be311", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.224981] env[61356]: DEBUG oslo_concurrency.lockutils [req-a5dc6646-b40b-4607-9e9e-ecf738c1582f req-7c94d838-6fa7-45d7-8794-65c5b98716c6 service nova] Releasing lock "refresh_cache-9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.265175] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302663, 'name': CreateVM_Task, 'duration_secs': 0.286774} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.265336] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1872.265967] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.266145] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.266456] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1872.266715] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e540601f-13a5-4437-918a-f7a2ae473e9b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.270714] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 1872.270714] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e76e7f-99f4-c64c-3991-daa924470e4c" [ 1872.270714] env[61356]: _type = "Task" [ 1872.270714] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.277553] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52e76e7f-99f4-c64c-3991-daa924470e4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.781041] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.781405] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1872.781521] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.758011] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.771095] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.771320] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.771485] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.771648] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1888.772803] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63489042-e073-47a9-bd65-d54d73cade74 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.781427] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af94f45-d354-4694-bd54-dd6821fff5a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1888.794956] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24455aa9-da6b-4519-ba97-9917e385ee3f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.801587] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d4febc-1352-45a8-ae0c-e686182eec73 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.833129] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180653MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1888.833290] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.833501] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.914937] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.915250] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.915409] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.915539] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.915695] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.915890] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.916047] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.916169] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.916282] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.916394] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1888.932996] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1888.933248] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1888.933409] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=110GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1889.060070] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658edc7c-bd1c-4e45-a631-30d3b88f44d0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.067671] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0cfbb3-4dc0-417b-b604-08f51e22d910 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.096405] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9deeadec-323b-4c1b-8d47-fe66e5d75782 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.102819] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7a17db-768a-47ee-8f15-f70eb30068d6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.115342] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.124608] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1889.137767] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1889.137947] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.304s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.138551] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.753927] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.759139] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.759139] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.759568] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.759568] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1896.758629] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1898.758197] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.758448] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.758897] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1900.758897] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1900.780697] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781025] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781218] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781355] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781512] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781604] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781726] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781848] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.781966] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.782096] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1900.782218] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1915.270817] env[61356]: WARNING oslo_vmware.rw_handles [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1915.270817] env[61356]: ERROR oslo_vmware.rw_handles [ 1915.271435] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1915.273383] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1915.273615] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Copying Virtual Disk [datastore2] vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/0d072ae6-8377-4bc7-976f-bb1dd7365c4c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1915.273954] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f44cb7e8-4b41-4d96-8281-0e85f82908f3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.282577] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1915.282577] 
env[61356]: value = "task-4302664" [ 1915.282577] env[61356]: _type = "Task" [ 1915.282577] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.290469] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': task-4302664, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.793455] env[61356]: DEBUG oslo_vmware.exceptions [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1915.793740] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.794328] env[61356]: ERROR nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1915.794328] env[61356]: Faults: ['InvalidArgument'] [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Traceback (most recent call last): [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] yield resources [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self.driver.spawn(context, instance, image_meta, [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._fetch_image_if_missing(context, vi) [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] image_cache(vi, tmp_image_ds_loc) [ 
1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] vm_util.copy_virtual_disk( [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] session._wait_for_task(vmdk_copy_task) [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.wait_for_task(task_ref) [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return evt.wait() [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] result = hub.switch() [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.greenlet.switch() [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self.f(*self.args, **self.kw) [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] raise exceptions.translate_fault(task_info.error) [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Faults: ['InvalidArgument'] [ 1915.794328] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] [ 1915.795663] env[61356]: INFO nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Terminating instance [ 1915.796266] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.796472] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1915.796709] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ab0fc3a-9442-4c72-aca2-00fcd0353dcd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.798927] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.799100] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.799270] env[61356]: DEBUG nova.network.neutron [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1915.805912] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1915.806104] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1915.807353] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bebd14c-f11d-4113-9cd2-b56340945480 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.814412] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1915.814412] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a2baf7-fd2d-e13d-ca13-6a2511d7a574" [ 1915.814412] env[61356]: _type = "Task" [ 1915.814412] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.822200] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a2baf7-fd2d-e13d-ca13-6a2511d7a574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.829074] env[61356]: DEBUG nova.network.neutron [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1915.891181] env[61356]: DEBUG nova.network.neutron [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.900168] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.900583] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1915.900774] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1915.901835] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa57aca-3227-4a5d-b2ef-f52a726ef530 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.909792] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1915.910023] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ad46a24-61d3-4dd9-ac5b-6c33ef240cbd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.946134] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1915.946439] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1915.946565] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Deleting the datastore file [datastore2] 2cc941f9-1184-4ce8-89b9-acb3b537b918 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1915.946798] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-962b7f42-8ad1-47dc-af6b-b270792e9873 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.953217] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1915.953217] env[61356]: value = "task-4302666" [ 1915.953217] env[61356]: _type = "Task" [ 1915.953217] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.961143] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': task-4302666, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.326051] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1916.326051] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Creating directory with path [datastore2] vmware_temp/590787a2-b033-4a56-ba06-a482a4ee645f/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.326051] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce5c1561-aa26-4850-9bb1-468d6e81f4df {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.336385] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Created directory with path [datastore2] vmware_temp/590787a2-b033-4a56-ba06-a482a4ee645f/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.336615] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Fetch image to [datastore2] vmware_temp/590787a2-b033-4a56-ba06-a482a4ee645f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1916.336828] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/590787a2-b033-4a56-ba06-a482a4ee645f/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1916.337589] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb114732-ce5a-40de-a98a-ad46e27fe37b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.344123] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1705e6f0-29b0-40c8-a0ea-3e696843cf40 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.352810] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048503a7-a9a2-4069-bcff-798d7909c431 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.384227] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68562641-bb99-48c8-af4d-172f2ebb5987 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.390005] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-086937fb-af21-4b97-91c4-dc4066dec47a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.411594] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1916.462108] env[61356]: DEBUG oslo_vmware.api [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': task-4302666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034447} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.462358] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1916.462542] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1916.462712] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1916.462882] env[61356]: INFO nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1916.463161] env[61356]: DEBUG oslo.service.loopingcall [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1916.463364] env[61356]: DEBUG nova.compute.manager [-] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1916.465486] env[61356]: DEBUG nova.compute.claims [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1916.465654] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.465888] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.613304] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.614164] env[61356]: ERROR nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
[ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Traceback (most recent call last): [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = getattr(controller, method)(*args, **kwargs) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._get(image_id) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] resp, body = self.http_client.get(url, headers=header) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.request(url, 'GET', **kwargs) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._handle_response(resp) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise exc.from_response(resp, resp.content) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] During handling of the above exception, another exception occurred: [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Traceback (most recent call last): [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] yield resources [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self.driver.spawn(context, instance, image_meta, [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._fetch_image_if_missing(context, vi) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] image_fetch(context, vi, tmp_image_ds_loc) [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] images.fetch_image( [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1916.614164] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] metadata = IMAGE_API.get(context, image_ref) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return session.show(context, image_id, [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] _reraise_translated_image_exception(image_id) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise new_exc.with_traceback(exc_trace) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = getattr(controller, method)(*args, **kwargs) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._get(image_id) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] resp, body = self.http_client.get(url, headers=header) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.request(url, 'GET', **kwargs) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._handle_response(resp) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise exc.from_response(resp, resp.content) [ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. 
[ 1916.615623] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1916.615623] env[61356]: INFO nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Terminating instance [ 1916.616480] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.616480] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.616657] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.616810] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.617033] env[61356]: DEBUG nova.network.neutron [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1916.621021] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e4994f2-0f97-41d4-a7e5-5a309f4f3c69 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.629573] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.629753] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1916.632591] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62c231ff-e7ca-44be-af63-4185befe7597 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.637720] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Waiting for the task: (returnval){ [ 1916.637720] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]526a4618-c653-18e4-639d-ae09e6bdee71" [ 1916.637720] env[61356]: _type = "Task" [ 1916.637720] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.645723] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]526a4618-c653-18e4-639d-ae09e6bdee71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.647183] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7529e3cc-7d56-4a17-8bf0-aa46489e1bd9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.652897] env[61356]: DEBUG nova.network.neutron [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1916.655212] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17db8238-bcff-4342-9935-0b273d06f01a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.686360] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df8cc58-3543-4405-9abf-efcda5259cbd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.692881] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72892b5-b2c0-491a-aa84-305dde3971a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.706385] env[61356]: DEBUG nova.compute.provider_tree [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1916.715893] env[61356]: DEBUG nova.scheduler.client.report [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1916.728777] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.263s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.729301] env[61356]: ERROR nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1916.729301] env[61356]: Faults: ['InvalidArgument'] [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Traceback (most recent call last): [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self.driver.spawn(context, instance, image_meta, [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1916.729301] 
env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._fetch_image_if_missing(context, vi) [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] image_cache(vi, tmp_image_ds_loc) [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] vm_util.copy_virtual_disk( [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] session._wait_for_task(vmdk_copy_task) [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.wait_for_task(task_ref) [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return evt.wait() [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] result = hub.switch() [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.greenlet.switch() [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self.f(*self.args, **self.kw) [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] raise exceptions.translate_fault(task_info.error) [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 
2cc941f9-1184-4ce8-89b9-acb3b537b918] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Faults: ['InvalidArgument'] [ 1916.729301] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] [ 1916.730298] env[61356]: DEBUG nova.compute.utils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1916.731576] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Build of instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 was re-scheduled: A specified parameter was not correct: fileType [ 1916.731576] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1916.731949] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1916.732183] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.732333] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.732494] env[61356]: DEBUG nova.network.neutron [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1916.745140] env[61356]: DEBUG nova.network.neutron [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.752795] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.753170] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 
tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1916.753359] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1916.754173] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab27bf19-7e77-4a97-851f-17da1f80a9d5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.757414] env[61356]: DEBUG nova.network.neutron [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1916.763188] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1916.763608] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8022dbed-d5ad-48a0-9386-517f046fab6a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.793403] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1916.793602] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1916.793775] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Deleting the datastore file [datastore2] 83829089-1eb5-4337-9d67-71c81c9bbbcd {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1916.794036] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad931a63-d43f-46b6-9ab4-c439dadc041a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.800075] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for the task: (returnval){ [ 1916.800075] env[61356]: value = "task-4302668" [ 1916.800075] env[61356]: _type = "Task" [ 
1916.800075] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.807300] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': task-4302668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.845158] env[61356]: DEBUG nova.network.neutron [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.853893] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.854118] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1916.854309] env[61356]: DEBUG nova.compute.manager [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1916.944576] env[61356]: INFO nova.scheduler.client.report [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Deleted allocations for instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 [ 1916.965590] env[61356]: DEBUG oslo_concurrency.lockutils [None req-26e9d61b-5ee6-43cc-a3fb-d083ef4e2540 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.428s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.966656] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 223.796s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.966895] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "2cc941f9-1184-4ce8-89b9-acb3b537b918-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.967120] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.967289] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.969162] env[61356]: INFO nova.compute.manager [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Terminating instance [ 1916.970640] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.970794] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock 
"refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.971214] env[61356]: DEBUG nova.network.neutron [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1916.978930] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1916.996548] env[61356]: DEBUG nova.network.neutron [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1917.029015] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.029287] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.031049] env[61356]: INFO nova.compute.claims [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1917.054450] env[61356]: DEBUG nova.network.neutron [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.062925] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "refresh_cache-2cc941f9-1184-4ce8-89b9-acb3b537b918" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.063341] env[61356]: DEBUG nova.compute.manager [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1917.063738] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1917.064058] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e61d42f3-2305-4cef-9758-462d70334c0a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.073119] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1de3b4-ce5f-47b1-abcd-c3931e7c5d99 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.104113] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2cc941f9-1184-4ce8-89b9-acb3b537b918 could not be found. [ 1917.104322] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1917.104500] env[61356]: INFO nova.compute.manager [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1917.104731] env[61356]: DEBUG oslo.service.loopingcall [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.104973] env[61356]: DEBUG nova.compute.manager [-] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1917.105083] env[61356]: DEBUG nova.network.neutron [-] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1917.149941] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1917.150635] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Creating directory with path [datastore2] vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1917.150635] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00321eaf-f5eb-470b-8bcf-936d8424de8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.164983] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Created directory with path [datastore2] vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1917.165137] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Fetch image to [datastore2] vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1917.165294] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1917.169408] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c458105-845c-480f-8fa5-0a606ae55a62 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.179820] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46ba783-be2a-4bde-a37d-7eb7544f8f21 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.190530] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a00cdd-133c-480d-b197-e63a5a040e95 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.222949] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3cdbb7-765b-46e4-86e6-d41a48b2b5c5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.228564] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b4905bfd-ef26-41a4-b204-838fe1280c40 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.260572] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1917.284397] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa0a0d9-d90f-4035-ae2f-ceaa78f58834 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.291229] env[61356]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1917.291532] env[61356]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1917.292150] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-09f8cf74-2438-484e-a6a8-2eecbea28eac'] [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1917.292150] env[61356]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1917.292150] env[61356]: ERROR oslo.service.loopingcall [ 1917.293601] env[61356]: ERROR nova.compute.manager [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1917.300149] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc322e3-c68d-40ed-9247-5fe81ac731db {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.340262] env[61356]: DEBUG oslo_vmware.api [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Task: {'id': task-4302668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038562} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.341627] env[61356]: DEBUG oslo_vmware.rw_handles [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1917.344243] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1917.344243] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1917.344243] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1917.344243] env[61356]: INFO nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1917.344518] env[61356]: DEBUG oslo.service.loopingcall [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.345300] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2953225-fa9e-4b76-8433-c8d4241db9a2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.349183] env[61356]: ERROR nova.compute.manager [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Traceback (most recent call last): [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] ret = obj(*args, **kwargs) [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] exception_handler_v20(status_code, error_body) [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] raise client_exc(message=error_message, [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Neutron server returns request_ids: ['req-09f8cf74-2438-484e-a6a8-2eecbea28eac'] [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] During handling of the above exception, another exception occurred: [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Traceback (most recent call last): [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._delete_instance(context, instance, bdms) [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._shutdown_instance(context, instance, bdms) [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._try_deallocate_network(context, instance, requested_networks) [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] with excutils.save_and_reraise_exception(): [ 1917.349183] env[61356]: ERROR 
nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self.force_reraise() [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] raise self.value [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] _deallocate_network_with_retries() [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return evt.wait() [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] result = hub.switch() [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.greenlet.switch() [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] result = func(*self.args, **self.kw) [ 1917.349183] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] result = f(*args, **kwargs) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._deallocate_network( [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self.network_api.deallocate_for_instance( [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 
2cc941f9-1184-4ce8-89b9-acb3b537b918] data = neutron.list_ports(**search_opts) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] ret = obj(*args, **kwargs) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.list('ports', self.ports_path, retrieve_all, [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] ret = obj(*args, **kwargs) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] for r in self._pagination(collection, path, **params): [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] res = self.get(path, params=params) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] ret = obj(*args, **kwargs) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.retry_request("GET", action, body=body, [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] ret = obj(*args, **kwargs) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] return self.do_request(method, action, body=body, [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] ret = obj(*args, **kwargs) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] self._handle_fault_response(status_code, replybody, resp) [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1917.350583] env[61356]: ERROR nova.compute.manager [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] [ 1917.356244] env[61356]: DEBUG nova.compute.manager [-] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network deallocation for instance since networking was not requested. {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1917.408134] env[61356]: DEBUG nova.compute.claims [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1917.408329] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.409213] env[61356]: DEBUG oslo_concurrency.lockutils [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.443s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.411044] env[61356]: DEBUG oslo_vmware.rw_handles [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1917.411381] env[61356]: DEBUG oslo_vmware.rw_handles [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
[ 1917.413995] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 192.381s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.414207] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] During sync_power_state the instance has a pending task (deleting). Skip. [ 1917.414399] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "2cc941f9-1184-4ce8-89b9-acb3b537b918" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.415967] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa04d263-b54a-420b-858e-6063c26a88e5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.432553] env[61356]: DEBUG nova.compute.provider_tree [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.443742] env[61356]: DEBUG nova.scheduler.client.report [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1917.457613] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.428s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.458172] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Start building networks asynchronously for instance.
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1917.460859] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.052s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.480549] env[61356]: INFO nova.compute.manager [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 2cc941f9-1184-4ce8-89b9-acb3b537b918] Successfully reverted task state from None on failure for instance. [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server [None req-b9725aab-e44a-49ca-9151-3effd05ca814 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-09f8cf74-2438-484e-a6a8-2eecbea28eac'] [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server result = 
func(ctxt, **new_args) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in 
inner [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1917.483911] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 
1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1917.485555] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1917.487246] env[61356]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1917.487246] env[61356]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1917.487246] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1917.487246] env[61356]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1917.487246] env[61356]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1917.487246] env[61356]: ERROR oslo_messaging.rpc.server [ 1917.497240] env[61356]: DEBUG nova.compute.utils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1917.501982] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1917.502974] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1917.510202] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1917.563722] env[61356]: DEBUG nova.policy [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c04f908f7b34e79aaa93dde54c979ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bcdb0191d3e48ee944f4dc611c64fb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1917.576275] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Start spawning the instance on the hypervisor. 
[ 1917.597819] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1917.598080] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1917.598256] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1917.598458] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1917.598589] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1917.598733] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1917.599107] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1917.599285] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1917.599454] env[61356]: DEBUG nova.virt.hardware [None
req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1917.599617] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1917.599880] env[61356]: DEBUG nova.virt.hardware [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1917.600651] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ce427e-6087-4640-97ea-efdac56b4f5b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.610232] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213f0066-9b3a-46ba-8dff-d56c15bca7aa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.637471] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5723f4c1-427e-4bef-b957-d9741d5dc379 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.643875] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8e0545-2de7-4873-a5d8-267c3c3868e7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.674256] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdb814b-a6f7-4a2b-920a-8874d27e8b81 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.680925] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821f2824-9e88-4152-899e-85c9f6227989 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.693383] env[61356]: DEBUG nova.compute.provider_tree [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.701986] env[61356]: DEBUG nova.scheduler.client.report [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 
'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1917.715500] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.255s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.716274] env[61356]: ERROR nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Traceback (most recent call last): [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = getattr(controller, method)(*args, **kwargs) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._get(image_id) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] resp, body = self.http_client.get(url, headers=header) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.request(url, 'GET', **kwargs) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._handle_response(resp) [ 1917.716274] env[61356]: ERROR 
nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise exc.from_response(resp, resp.content) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] During handling of the above exception, another exception occurred: [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Traceback (most recent call last): [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self.driver.spawn(context, instance, image_meta, [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._fetch_image_if_missing(context, vi) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] image_fetch(context, vi, tmp_image_ds_loc) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] images.fetch_image( [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] metadata = IMAGE_API.get(context, image_ref) [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1917.716274] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return session.show(context, image_id, [ 1917.717515] 
env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] _reraise_translated_image_exception(image_id) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise new_exc.with_traceback(exc_trace) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = getattr(controller, method)(*args, **kwargs) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._get(image_id) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] resp, body = self.http_client.get(url, headers=header) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.request(url, 'GET', **kwargs) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self._handle_response(resp) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise exc.from_response(resp, resp.content) [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] nova.exception.ImageNotAuthorized: 
Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. [ 1917.717515] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1917.717515] env[61356]: DEBUG nova.compute.utils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1917.718581] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Build of instance 83829089-1eb5-4337-9d67-71c81c9bbbcd was re-scheduled: Not authorized for image 88d3b9c3-6614-40fd-b1c9-50fb806e539a. {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1917.719067] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1917.719293] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.719438] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.719601] env[61356]: DEBUG nova.network.neutron [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1917.743936] env[61356]: DEBUG nova.network.neutron [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance cache missing network info. 
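{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}

The ImageNotAuthorized traceback above goes through _reraise_translated_image_exception (the nova/image/glance.py:1032 frame), which maps the glanceclient-level HTTP 401 onto a Nova-level exception while keeping the original traceback, so the failing keystoneauth GET stays visible in the log. A reduced sketch of that helper (the real one also inspects the client exception type to choose among several Nova exceptions):

    import sys

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""

    def reraise_translated_image_exception(image_id):
        # Call from an except block, as glance.py:287 does above.
        exc_type, exc_value, exc_trace = sys.exc_info()
        new_exc = ImageNotAuthorized("Not authorized for image %s." % image_id)
        # Preserve the original traceback on the new exception type,
        # mirroring the "raise new_exc.with_traceback(exc_trace)" frame.
        raise new_exc.with_traceback(exc_trace)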
[ 1917.805461] env[61356]: DEBUG nova.network.neutron [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.814469] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.814648] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1917.814828] env[61356]: DEBUG nova.compute.manager [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Skipping network deallocation for instance since networking was not requested. {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1917.880119] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Successfully created port: 18e506bb-193d-479b-b824-25e5bf1f700e {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1917.941232] env[61356]: INFO nova.scheduler.client.report [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Deleted allocations for instance 83829089-1eb5-4337-9d67-71c81c9bbbcd [ 1917.962568] env[61356]: DEBUG oslo_concurrency.lockutils [None req-e7c3b080-ab29-41c9-b140-79f8dfc57f94 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 617.055s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.962820] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 421.531s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.963249] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "83829089-1eb5-4337-9d67-71c81c9bbbcd-events" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.963249] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.963440] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.966467] env[61356]: INFO nova.compute.manager [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Terminating instance [ 1917.968236] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquiring lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.968394] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Acquired lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.968561] env[61356]: DEBUG nova.network.neutron [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1917.996718] env[61356]: DEBUG nova.network.neutron [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance cache missing network info. 
[ 1918.061532] env[61356]: DEBUG nova.network.neutron [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.070172] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Releasing lock "refresh_cache-83829089-1eb5-4337-9d67-71c81c9bbbcd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.071060] env[61356]: DEBUG nova.compute.manager [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1918.071060] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1918.071189] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7089a0d-c7d0-4ad4-8ee4-3c9b5c28f3b5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.080608] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4735d938-0f30-482a-8b14-2c7d33e4118a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.112068] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 83829089-1eb5-4337-9d67-71c81c9bbbcd could not be found. [ 1918.112311] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1918.112490] env[61356]: INFO nova.compute.manager [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1918.112756] env[61356]: DEBUG oslo.service.loopingcall [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
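{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}

The "Waiting for function ... _deallocate_network_with_retries to return" record, and the "Dynamic interval looping call ... failed" error just below, show the network deallocation wrapped in oslo.service's RetryDecorator, which retries only the exception types it is configured with; NeutronAdminCredentialConfigurationInvalid is not among them, so it escapes on the first attempt. A sketch of the wrapper; the retry parameters and the retryable exception are illustrative, not Nova's exact configuration:

    from oslo_service import loopingcall

    class TransientNetworkError(Exception):
        """Illustrative stand-in for a retryable connection failure."""

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=30,
                                exceptions=(TransientNetworkError,))
    def deallocate_network_with_retries():
        # Retried with growing sleeps only for the listed exception
        # types; anything else -- like the credential error here --
        # propagates immediately and the looping call logs "failed".
        ...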
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1918.113038] env[61356]: DEBUG nova.compute.manager [-] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1918.113190] env[61356]: DEBUG nova.network.neutron [-] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1918.217138] env[61356]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1918.217138] env[61356]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-4f76d082-c0e4-4578-be06-f8b20b74cf07'] [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.217534] env[61356]: ERROR oslo.service.loopingcall [ 1918.218986] env[61356]: ERROR nova.compute.manager [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.253817] env[61356]: ERROR nova.compute.manager [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Traceback (most recent call last): [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] ret = obj(*args, **kwargs) [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] exception_handler_v20(status_code, error_body) [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise client_exc(message=error_message, [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Neutron server returns request_ids: ['req-4f76d082-c0e4-4578-be06-f8b20b74cf07'] [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] During handling of the above exception, another exception occurred: [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Traceback (most recent call last): [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File 
"/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._delete_instance(context, instance, bdms) [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._shutdown_instance(context, instance, bdms) [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._try_deallocate_network(context, instance, requested_networks) [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] with excutils.save_and_reraise_exception(): [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self.force_reraise() [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise self.value [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] _deallocate_network_with_retries() [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return evt.wait() [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = hub.switch() [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.greenlet.switch() [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = 
func(*self.args, **self.kw) [ 1918.253817] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] result = f(*args, **kwargs) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._deallocate_network( [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self.network_api.deallocate_for_instance( [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] data = neutron.list_ports(**search_opts) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] ret = obj(*args, **kwargs) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.list('ports', self.ports_path, retrieve_all, [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] ret = obj(*args, **kwargs) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] for r in self._pagination(collection, path, **params): [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] res = self.get(path, params=params) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] ret = obj(*args, **kwargs) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.retry_request("GET", action, body=body, [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] ret = obj(*args, **kwargs) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] return self.do_request(method, action, body=body, [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] ret = obj(*args, **kwargs) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] self._handle_fault_response(status_code, replybody, resp) [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.254891] env[61356]: ERROR nova.compute.manager [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] [ 1918.281851] env[61356]: DEBUG oslo_concurrency.lockutils [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.319s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.283116] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 193.250s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.283235] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] During sync_power_state the instance has a pending task (deleting). Skip. 
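Both tracebacks above funnel through the same wrapper in nova/network/neutron.py (lines 196 and 212 in this build): every neutronclient call is proxied, and a 401 Unauthorized from Neutron is re-raised as NeutronAdminCredentialConfigurationInvalid, which is what ultimately drives the instance into vm_state ERROR. A minimal sketch of that wrapper pattern, with illustrative names (`wrap_neutron_call` and the local exception class are stand-ins, not Nova's actual identifiers):

```python
# Sketch of the proxy/wrapper pattern visible at neutron.py:196/212 in the
# tracebacks above; names are illustrative, not Nova's real code.
from neutronclient.common import exceptions as neutron_client_exc


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""


def wrap_neutron_call(func):
    """Re-raise a Neutron 401 as a Nova configuration error."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except neutron_client_exc.Unauthorized:
            # The admin-scoped request was rejected with HTTP 401, so the
            # [neutron] credentials in nova.conf are presumed to be invalid.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper
```

As the ERROR line at 1918.217138 already advises, the remedy is to verify the Neutron admin credentials configured in nova.conf rather than anything on the instance itself.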
[ 1918.283365] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "83829089-1eb5-4337-9d67-71c81c9bbbcd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.335152] env[61356]: INFO nova.compute.manager [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] [instance: 83829089-1eb5-4337-9d67-71c81c9bbbcd] Successfully reverted task state from None on failure for instance. [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server [None req-32461ac8-ddd0-4547-bab2-62cb64964339 tempest-ServerShowV247Test-1033627926 tempest-ServerShowV247Test-1033627926-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-4f76d082-c0e4-4578-be06-f8b20b74cf07'] [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1918.338921] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1918.340598] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1918.342364] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1918.342364] 
env[61356]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1918.342364] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1918.342364] env[61356]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1918.342364] env[61356]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1918.342364] env[61356]: ERROR oslo_messaging.rpc.server [ 1918.723359] env[61356]: DEBUG nova.compute.manager [req-64552c8a-b41c-4f75-97c2-50a342dc5e67 req-a2a27bd1-79fc-4ead-9184-c5e00cb88335 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Received event network-vif-plugged-18e506bb-193d-479b-b824-25e5bf1f700e {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1918.723359] env[61356]: DEBUG oslo_concurrency.lockutils [req-64552c8a-b41c-4f75-97c2-50a342dc5e67 req-a2a27bd1-79fc-4ead-9184-c5e00cb88335 service nova] Acquiring lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.723359] env[61356]: DEBUG oslo_concurrency.lockutils [req-64552c8a-b41c-4f75-97c2-50a342dc5e67 req-a2a27bd1-79fc-4ead-9184-c5e00cb88335 service nova] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.723359] env[61356]: DEBUG oslo_concurrency.lockutils [req-64552c8a-b41c-4f75-97c2-50a342dc5e67 req-a2a27bd1-79fc-4ead-9184-c5e00cb88335 service nova] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.725210] env[61356]: DEBUG nova.compute.manager [req-64552c8a-b41c-4f75-97c2-50a342dc5e67 req-a2a27bd1-79fc-4ead-9184-c5e00cb88335 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] No waiting events found dispatching network-vif-plugged-18e506bb-193d-479b-b824-25e5bf1f700e {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1918.726074] env[61356]: WARNING nova.compute.manager [req-64552c8a-b41c-4f75-97c2-50a342dc5e67 req-a2a27bd1-79fc-4ead-9184-c5e00cb88335 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Received unexpected event network-vif-plugged-18e506bb-193d-479b-b824-25e5bf1f700e for instance with vm_state building and task_state spawning. 
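The Acquiring/acquired/released triplets logged from lockutils.py:402/407/421 above come from oslo.concurrency's synchronized decorator: Nova serializes external instance events (such as network-vif-plugged-…) on a per-instance "<uuid>-events" lock before popping the matching waiter. A minimal sketch of that pattern, assuming an illustrative in-memory event map (the function here is a simplified stand-in for the method named in the log, not Nova's implementation):

```python
# Minimal sketch of the per-instance event lock pattern behind the
# '"<uuid>-events" acquired by ...pop_instance_event.<locals>._pop_event'
# lines above. lockutils is the real oslo.concurrency API; the event map
# and function bodies are illustrative.
from oslo_concurrency import lockutils

_events = {}  # hypothetical: {instance_uuid: {event_name: waiter}}


def pop_instance_event(instance_uuid, event_name):
    # The nested function decorated with lockutils.synchronized() is what
    # produces the Acquiring/acquired/released DEBUG triplets (and the
    # ".<locals>._pop_event" qualname) when debug logging is enabled.
    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        return _events.get(instance_uuid, {}).pop(event_name, None)
    return _pop_event()
```

When no waiter is registered, the event is reported as unexpected, exactly as the WARNING for network-vif-plugged-18e506bb-… shows for an instance still in vm_state building.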
[ 1918.731519] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Successfully updated port: 18e506bb-193d-479b-b824-25e5bf1f700e {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1918.744827] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "refresh_cache-7f078f06-c61b-4215-a95b-25ba3adaf5fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.744827] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquired lock "refresh_cache-7f078f06-c61b-4215-a95b-25ba3adaf5fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.744827] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1918.808497] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1918.994956] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Updating instance_info_cache with network_info: [{"id": "18e506bb-193d-479b-b824-25e5bf1f700e", "address": "fa:16:3e:fb:ef:ae", "network": {"id": "3a71f3d9-51b3-40da-ae31-fe82f6950626", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2092771782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bcdb0191d3e48ee944f4dc611c64fb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18e506bb-19", "ovs_interfaceid": "18e506bb-193d-479b-b824-25e5bf1f700e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.005955] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Releasing lock "refresh_cache-7f078f06-c61b-4215-a95b-25ba3adaf5fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.006308] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance network_info: |[{"id": "18e506bb-193d-479b-b824-25e5bf1f700e", "address": "fa:16:3e:fb:ef:ae", "network": {"id": "3a71f3d9-51b3-40da-ae31-fe82f6950626", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2092771782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bcdb0191d3e48ee944f4dc611c64fb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18e506bb-19", "ovs_interfaceid": "18e506bb-193d-479b-b824-25e5bf1f700e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1919.006761] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:ef:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18e506bb-193d-479b-b824-25e5bf1f700e', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1919.014389] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Creating folder: Project (3bcdb0191d3e48ee944f4dc611c64fb2). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1919.015254] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf5d0438-de32-4148-8ac3-122584aeb5f4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.026656] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Created folder: Project (3bcdb0191d3e48ee944f4dc611c64fb2) in parent group-v846926. [ 1919.026817] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Creating folder: Instances. Parent ref: group-v847038. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1919.027045] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97da7290-a027-45f8-b033-5473bc47a11b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.035754] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Created folder: Instances in parent group-v847038. [ 1919.036019] env[61356]: DEBUG oslo.service.loopingcall [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.036207] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1919.036394] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afda9bf8-c617-4d52-b459-46209fde1f32 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.055416] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1919.055416] env[61356]: value = "task-4302671" [ 1919.055416] env[61356]: _type = "Task" [ 1919.055416] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.063120] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302671, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.565504] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302671, 'name': CreateVM_Task, 'duration_secs': 0.280992} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.565893] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1919.566405] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.566567] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.566903] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1919.567171] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb86f431-1a48-4ccd-8efc-0d0101929170 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.571412] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Waiting for the task: (returnval){ [ 1919.571412] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5257435f-31de-9b8d-f448-873fb441b463" [ 1919.571412] env[61356]: _type = "Task" [ 1919.571412] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.578483] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5257435f-31de-9b8d-f448-873fb441b463, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.083275] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.083556] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1920.083743] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.806385] env[61356]: DEBUG nova.compute.manager [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Received event network-changed-18e506bb-193d-479b-b824-25e5bf1f700e {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1920.806642] env[61356]: DEBUG nova.compute.manager [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Refreshing instance network info cache due to event network-changed-18e506bb-193d-479b-b824-25e5bf1f700e. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1920.806740] env[61356]: DEBUG oslo_concurrency.lockutils [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] Acquiring lock "refresh_cache-7f078f06-c61b-4215-a95b-25ba3adaf5fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.806854] env[61356]: DEBUG oslo_concurrency.lockutils [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] Acquired lock "refresh_cache-7f078f06-c61b-4215-a95b-25ba3adaf5fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.807067] env[61356]: DEBUG nova.network.neutron [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Refreshing network info cache for port 18e506bb-193d-479b-b824-25e5bf1f700e {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1921.133656] env[61356]: DEBUG nova.network.neutron [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Updated VIF entry in instance network info cache for port 18e506bb-193d-479b-b824-25e5bf1f700e. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1921.134205] env[61356]: DEBUG nova.network.neutron [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Updating instance_info_cache with network_info: [{"id": "18e506bb-193d-479b-b824-25e5bf1f700e", "address": "fa:16:3e:fb:ef:ae", "network": {"id": "3a71f3d9-51b3-40da-ae31-fe82f6950626", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-2092771782-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bcdb0191d3e48ee944f4dc611c64fb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18e506bb-19", "ovs_interfaceid": "18e506bb-193d-479b-b824-25e5bf1f700e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.145703] env[61356]: DEBUG oslo_concurrency.lockutils [req-41664c4a-ecf2-4adf-80d9-9f36bf6a5c0e req-7401e885-a807-4d8f-8815-f48768494bf2 service nova] Releasing lock "refresh_cache-7f078f06-c61b-4215-a95b-25ba3adaf5fd" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.020453] env[61356]: DEBUG oslo_concurrency.lockutils [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.961317] env[61356]: DEBUG oslo_concurrency.lockutils [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "13efd018-f491-4979-87b8-0b4af68aaf1b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.757666] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1950.769087] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.769297] env[61356]: DEBUG oslo_concurrency.lockutils [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.769488] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.769645] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1950.770747] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5a6d1e-6562-4135-bf5e-89b012b44a3e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.779363] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8c73f0-d125-496e-b4a6-f1692f8c3555 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.794182] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1b9c4d-be33-410b-ba8b-2bae1f67c1cc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.800243] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0624611f-4161-450f-b2bc-73ec89bf321a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.828400] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180673MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1950.828556] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.828717] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.906135] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9958f378-1cab-49bf-8375-5e95c16401e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.906325] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.906458] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.906580] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.906699] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.906816] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.906932] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.907061] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.907182] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1950.907371] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1950.907505] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=110GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1951.013815] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ef7088-aed1-42b5-89bb-b1e06f044e0f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.021727] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d04b07-aeb3-4f57-bd81-136497844498 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.052222] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd40c457-36b3-4ad7-8e5d-da8b80413873 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.058961] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83dce29-4c35-4042-a7b3-569263d1ed81 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.071464] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.079599] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1951.093179] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1951.093370] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.265s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.094543] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.753590] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.757468] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.758219] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.758538] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.758653] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.758764] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 1960.757956] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.758559] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1962.758859] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 1962.758859] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 1962.778675] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.778832] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.778964] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779103] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779228] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779349] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779470] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779585] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779701] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 1962.779818] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 1964.775934] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1967.129427] env[61356]: WARNING oslo_vmware.rw_handles [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.129427] env[61356]: ERROR oslo_vmware.rw_handles [ 1967.130222] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1967.133069] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1967.133463] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Copying Virtual Disk [datastore2] vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/33b737f1-5ee9-41cf-a9d8-baedfc34c60c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1967.133864] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2ded196-f64a-43c8-954a-f928c967f68f {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.143916] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Waiting for the task: (returnval){ [ 1967.143916] env[61356]: value = "task-4302672" [ 1967.143916] env[61356]: _type = "Task" [ 1967.143916] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.155604] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Task: {'id': task-4302672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.654366] env[61356]: DEBUG oslo_vmware.exceptions [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1967.654665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.655218] env[61356]: ERROR nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.655218] env[61356]: Faults: ['InvalidArgument'] [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Traceback (most recent call last): [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] yield resources [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self.driver.spawn(context, instance, image_meta, [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] 
self._fetch_image_if_missing(context, vi) [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] image_cache(vi, tmp_image_ds_loc) [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] vm_util.copy_virtual_disk( [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] session._wait_for_task(vmdk_copy_task) [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] return self.wait_for_task(task_ref) [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] return evt.wait() [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] result = hub.switch() [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] return self.greenlet.switch() [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self.f(*self.args, **self.kw) [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] raise exceptions.translate_fault(task_info.error) [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Faults: ['InvalidArgument'] [ 1967.655218] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] [ 1967.656375] env[61356]: INFO nova.compute.manager [None 
req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Terminating instance [ 1967.657102] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.657311] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1967.657547] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b30b68d0-9ff1-40e7-b8d9-14037baa24d7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.659635] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1967.659826] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1967.660547] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b6cbe7-9795-4b8b-972a-a44b0db62beb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.667070] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1967.667271] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9a27678-a42f-4139-ad6e-4f07edc3bd7d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.669328] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1967.669497] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1967.670408] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecb5e206-2819-44ec-937d-d1fc8ffcd6ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.674882] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 1967.674882] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5291c9b9-30bb-96ee-a914-4215954bc014" [ 1967.674882] env[61356]: _type = "Task" [ 1967.674882] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.682517] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5291c9b9-30bb-96ee-a914-4215954bc014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.740695] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1967.740932] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1967.741133] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Deleting the datastore file [datastore2] 9958f378-1cab-49bf-8375-5e95c16401e4 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1967.741378] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2cd9f3a-3b39-4813-b83b-3fbef8493607 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.747432] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Waiting for the task: (returnval){ [ 1967.747432] env[61356]: value = "task-4302674" [ 1967.747432] env[61356]: _type = "Task" [ 1967.747432] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.755802] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Task: {'id': task-4302674, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.185769] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1968.186183] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating directory with path [datastore2] vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1968.186280] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d39fdae7-7f56-4c45-ab5f-06c465abbd9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.197396] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created directory with path [datastore2] vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1968.197615] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Fetch image to [datastore2] vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1968.197751] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1968.198448] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc300227-089d-4cf4-a0bd-633dfd192747 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.204380] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b47c5e7-9296-4f72-b2fa-fdbc634ac767 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.212763] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d978fc01-aa0e-4aa1-a963-9e0f9572db3b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.242099] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831436ed-bf10-4fd3-867c-d41e44a62515 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.247070] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8a183bff-d73f-4a04-aa6c-d8c1815c2553 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.256861] env[61356]: DEBUG oslo_vmware.api [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Task: {'id': task-4302674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066184} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.257101] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1968.257281] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1968.257453] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1968.257626] env[61356]: INFO nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Took 0.60 seconds to destroy the instance on the hypervisor. 
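[editor's note] The CopyVirtualDisk_Task fault above and the DeleteDatastoreFile_Task that just completed both pass through the same oslo.vmware polling loop (_poll_task / wait_for_task in oslo_vmware/api.py, visible in the tracebacks). The sketch below is a schematic reconstruction for illustration, not the oslo.vmware source; the `get_task_info` callable and the `state`/`progress`/`key`/`fault_list`/`message` attribute names are assumptions standing in for vSphere TaskInfo.

```python
# Schematic of the polling behaviour behind the
# "Task: {'id': task-4302672, ...} progress is 0%." DEBUG lines and the
# VimFaultException raised for InvalidArgument. Illustrative only; the
# real loop lives in oslo_vmware/api.py (_poll_task / wait_for_task).
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it finishes; raise on error."""
    while True:
        info = get_task_info()  # assumed TaskInfo-like object
        if info.state in ('queued', 'running'):
            # Mirrors the periodic "progress is 0%." DEBUG records.
            print('Task %s progress is %s%%.' % (info.key, info.progress or 0))
        elif info.state == 'success':
            return info
        else:  # state == 'error'
            # oslo.vmware maps known faults to exception classes; anything
            # unmatched ("Fault InvalidArgument not matched." above)
            # surfaces as a generic VimFaultException.
            raise VimFaultException(info.fault_list, info.message)
        time.sleep(poll_interval)
```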
[ 1968.259672] env[61356]: DEBUG nova.compute.claims [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1968.259849] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.260069] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.270250] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1968.320673] env[61356]: DEBUG oslo_vmware.rw_handles [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1968.379969] env[61356]: DEBUG oslo_vmware.rw_handles [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1968.380177] env[61356]: DEBUG oslo_vmware.rw_handles [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1968.475986] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8861c045-cc93-4328-ad0e-58950ea42575 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.483517] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b77223-116a-4925-bdec-81ee1df25d8a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.513947] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b1f1d0-b852-4b9a-8932-8ae126a3040a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.521087] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e730c8a-6a15-4a7d-bcdd-df36f16c8191 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.533894] env[61356]: DEBUG nova.compute.provider_tree [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.542470] env[61356]: DEBUG nova.scheduler.client.report [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1968.555796] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.556342] env[61356]: ERROR nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1968.556342] env[61356]: Faults: ['InvalidArgument'] [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Traceback (most recent call last): [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1968.556342] env[61356]: 
ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self.driver.spawn(context, instance, image_meta, [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self._fetch_image_if_missing(context, vi) [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] image_cache(vi, tmp_image_ds_loc) [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] vm_util.copy_virtual_disk( [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] session._wait_for_task(vmdk_copy_task) [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] return self.wait_for_task(task_ref) [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] return evt.wait() [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] result = hub.switch() [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] return self.greenlet.switch() [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] self.f(*self.args, **self.kw) [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] raise exceptions.translate_fault(task_info.error) [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Faults: ['InvalidArgument'] [ 1968.556342] env[61356]: ERROR nova.compute.manager [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] [ 1968.557336] env[61356]: DEBUG nova.compute.utils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1968.558449] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Build of instance 9958f378-1cab-49bf-8375-5e95c16401e4 was re-scheduled: A specified parameter was not correct: fileType [ 1968.558449] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1968.558890] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1968.559084] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1968.559260] env[61356]: DEBUG nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1968.559421] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1968.891127] env[61356]: DEBUG nova.network.neutron [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.907198] env[61356]: INFO nova.compute.manager [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Took 0.35 seconds to deallocate network for instance. [ 1969.009867] env[61356]: INFO nova.scheduler.client.report [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Deleted allocations for instance 9958f378-1cab-49bf-8375-5e95c16401e4 [ 1969.036073] env[61356]: DEBUG oslo_concurrency.lockutils [None req-dca6b48c-a083-47c7-ac56-142d70c60776 tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "9958f378-1cab-49bf-8375-5e95c16401e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 593.545s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.036358] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "9958f378-1cab-49bf-8375-5e95c16401e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 397.026s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.036596] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Acquiring lock "9958f378-1cab-49bf-8375-5e95c16401e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.036812] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "9958f378-1cab-49bf-8375-5e95c16401e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.036982] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "9958f378-1cab-49bf-8375-5e95c16401e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.039405] env[61356]: INFO nova.compute.manager [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Terminating instance [ 1969.042121] env[61356]: DEBUG nova.compute.manager [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1969.042325] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1969.042854] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca89467a-2ebc-49aa-87b7-17dd400d3fa7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.052717] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a078a4-367e-4285-9145-c7478cdae523 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.081703] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9958f378-1cab-49bf-8375-5e95c16401e4 could not be found. [ 1969.081940] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1969.082151] env[61356]: INFO nova.compute.manager [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1969.082398] env[61356]: DEBUG oslo.service.loopingcall [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.082633] env[61356]: DEBUG nova.compute.manager [-] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1969.082728] env[61356]: DEBUG nova.network.neutron [-] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1969.112027] env[61356]: DEBUG nova.network.neutron [-] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.121324] env[61356]: INFO nova.compute.manager [-] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] Took 0.04 seconds to deallocate network for instance. [ 1969.221108] env[61356]: DEBUG oslo_concurrency.lockutils [None req-6eeecbee-2e34-4caa-933f-214da72376ea tempest-ServerActionsTestOtherA-142511620 tempest-ServerActionsTestOtherA-142511620-project-member] Lock "9958f378-1cab-49bf-8375-5e95c16401e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.221946] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "9958f378-1cab-49bf-8375-5e95c16401e4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 244.188s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.222167] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9958f378-1cab-49bf-8375-5e95c16401e4] During sync_power_state the instance has a pending task (deleting). Skip. 
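
The acquired/waited/released lines above are emitted by oslo.concurrency's lockutils wrappers, which Nova uses to serialize operations on a single instance: the build path held lock "9958f378-1cab-49bf-8375-5e95c16401e4" for 593.545s, so the terminate request queued for 397.026s before do_terminate_instance could run, and the later _sync_power_states pass simply skipped the instance because a delete task was already pending. A minimal sketch of that serialization pattern, assuming hypothetical stand-ins (guard_instance and the operation callable are illustrative, not Nova's actual code; lockutils.lock is the real oslo.concurrency context manager):

    # Illustrative only: serialize per-instance operations on one named lock,
    # mirroring the "acquired ... waited Ns" / "released ... held Ns" lines.
    import time
    from oslo_concurrency import lockutils

    def guard_instance(instance_uuid, operation):
        start = time.monotonic()
        with lockutils.lock(instance_uuid):    # blocks while another op holds it
            waited = time.monotonic() - start
            held_from = time.monotonic()
            print(f'Lock "{instance_uuid}" acquired :: waited {waited:.3f}s')
            try:
                operation()                    # e.g. the build, or the terminate
            finally:
                held = time.monotonic() - held_from
                print(f'Lock "{instance_uuid}" released :: held {held:.3f}s')

Under this scheme a terminate issued mid-build is not lost; it simply blocks on the same UUID-named lock until the build path releases it, which is exactly the 397.026s wait recorded above.
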
[ 1969.222345] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "9958f378-1cab-49bf-8375-5e95c16401e4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.723936] env[61356]: DEBUG oslo_concurrency.lockutils [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.807542] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "887b020c-3652-4724-a0d2-d7413325c5c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.807858] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "887b020c-3652-4724-a0d2-d7413325c5c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.818641] env[61356]: DEBUG nova.compute.manager [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1982.867871] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.868133] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.869698] env[61356]: INFO nova.compute.claims [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1983.031325] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082e9503-e507-40bd-b1dc-12e043a4d246 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.039256] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e22cd5-af3f-4063-b638-fc2bc1c7d775 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.806236] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb535fe2-0fdc-4480-ae38-96ed1877ad9c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.814050] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f941f95e-dd2c-4629-b7cc-1912b6a8ab32 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.826918] env[61356]: DEBUG nova.compute.provider_tree [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.836317] env[61356]: DEBUG nova.scheduler.client.report [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1983.850670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 
tempest-ServersTestJSON-1488391771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.982s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.851207] env[61356]: DEBUG nova.compute.manager [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1983.888954] env[61356]: DEBUG nova.compute.utils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1983.890515] env[61356]: DEBUG nova.compute.manager [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1983.890515] env[61356]: DEBUG nova.network.neutron [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1983.902504] env[61356]: DEBUG nova.compute.manager [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1983.957556] env[61356]: DEBUG nova.policy [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02fb0b02ccf0433e9f740f4b7177ab34', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f6c093c00e44197ba6affbc1c4964bf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 1983.966349] env[61356]: DEBUG nova.compute.manager [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1983.991103] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1983.991538] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1983.991732] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1983.991923] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1983.992083] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1983.992233] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1983.992439] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1983.992600] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1983.992767] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 
tempest-ServersTestJSON-1488391771-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1983.992943] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1983.993165] env[61356]: DEBUG nova.virt.hardware [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1983.993999] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c68cf6-701f-4441-96e4-b4b2d7e26526 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.001945] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd47663-7cc7-4a1e-ab02-7926f3a9c18b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.466958] env[61356]: DEBUG nova.network.neutron [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Successfully created port: 1d86f1ba-f3dc-41a9-a77d-630670cb20a7 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1985.056248] env[61356]: DEBUG nova.compute.manager [req-2e5dee41-d68b-4ba4-8f23-3b559cd0108d req-25706574-ce5e-4190-a6ec-05fffa24ec55 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Received event network-vif-plugged-1d86f1ba-f3dc-41a9-a77d-630670cb20a7 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1985.056551] env[61356]: DEBUG oslo_concurrency.lockutils [req-2e5dee41-d68b-4ba4-8f23-3b559cd0108d req-25706574-ce5e-4190-a6ec-05fffa24ec55 service nova] Acquiring lock "887b020c-3652-4724-a0d2-d7413325c5c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.056716] env[61356]: DEBUG oslo_concurrency.lockutils [req-2e5dee41-d68b-4ba4-8f23-3b559cd0108d req-25706574-ce5e-4190-a6ec-05fffa24ec55 service nova] Lock "887b020c-3652-4724-a0d2-d7413325c5c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.056887] env[61356]: DEBUG oslo_concurrency.lockutils [req-2e5dee41-d68b-4ba4-8f23-3b559cd0108d req-25706574-ce5e-4190-a6ec-05fffa24ec55 service nova] Lock "887b020c-3652-4724-a0d2-d7413325c5c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.057064] env[61356]: DEBUG nova.compute.manager [req-2e5dee41-d68b-4ba4-8f23-3b559cd0108d req-25706574-ce5e-4190-a6ec-05fffa24ec55 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] 
No waiting events found dispatching network-vif-plugged-1d86f1ba-f3dc-41a9-a77d-630670cb20a7 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1985.057228] env[61356]: WARNING nova.compute.manager [req-2e5dee41-d68b-4ba4-8f23-3b559cd0108d req-25706574-ce5e-4190-a6ec-05fffa24ec55 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Received unexpected event network-vif-plugged-1d86f1ba-f3dc-41a9-a77d-630670cb20a7 for instance with vm_state building and task_state spawning. [ 1985.144893] env[61356]: DEBUG nova.network.neutron [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Successfully updated port: 1d86f1ba-f3dc-41a9-a77d-630670cb20a7 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1985.155800] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "refresh_cache-887b020c-3652-4724-a0d2-d7413325c5c1" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.155990] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired lock "refresh_cache-887b020c-3652-4724-a0d2-d7413325c5c1" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.156158] env[61356]: DEBUG nova.network.neutron [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1985.198880] env[61356]: DEBUG nova.network.neutron [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1985.721851] env[61356]: DEBUG nova.network.neutron [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Updating instance_info_cache with network_info: [{"id": "1d86f1ba-f3dc-41a9-a77d-630670cb20a7", "address": "fa:16:3e:86:fd:93", "network": {"id": "cf4b8e3d-edbd-4eff-a69d-181d9b32fa8e", "bridge": "br-int", "label": "tempest-ServersTestJSON-832994017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f6c093c00e44197ba6affbc1c4964bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d86f1ba-f3", "ovs_interfaceid": "1d86f1ba-f3dc-41a9-a77d-630670cb20a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.735517] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Releasing lock "refresh_cache-887b020c-3652-4724-a0d2-d7413325c5c1" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.735785] env[61356]: DEBUG nova.compute.manager [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Instance network_info: |[{"id": "1d86f1ba-f3dc-41a9-a77d-630670cb20a7", "address": "fa:16:3e:86:fd:93", "network": {"id": "cf4b8e3d-edbd-4eff-a69d-181d9b32fa8e", "bridge": "br-int", "label": "tempest-ServersTestJSON-832994017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f6c093c00e44197ba6affbc1c4964bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d86f1ba-f3", "ovs_interfaceid": "1d86f1ba-f3dc-41a9-a77d-630670cb20a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1985.736205] env[61356]: 
DEBUG nova.virt.vmwareapi.vmops [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:fd:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02092ea4-bae0-4e42-b0ab-abc365b4395a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d86f1ba-f3dc-41a9-a77d-630670cb20a7', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1985.745145] env[61356]: DEBUG oslo.service.loopingcall [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.745650] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1985.745897] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e282a7d6-9055-4699-a9d7-72381139ed87 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.766674] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1985.766674] env[61356]: value = "task-4302675" [ 1985.766674] env[61356]: _type = "Task" [ 1985.766674] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.774054] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302675, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.276332] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302675, 'name': CreateVM_Task, 'duration_secs': 0.269231} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.276737] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1986.277233] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.277395] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1986.277864] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1986.278168] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60214a04-a610-4893-bba3-33e0697029f2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.282625] env[61356]: DEBUG oslo_vmware.api [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for the task: (returnval){ [ 1986.282625] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]526f025e-1fda-d4a8-dad2-3b92a679143f" [ 1986.282625] env[61356]: _type = "Task" [ 1986.282625] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.290031] env[61356]: DEBUG oslo_vmware.api [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]526f025e-1fda-d4a8-dad2-3b92a679143f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.793291] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.793553] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1986.793766] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.087183] env[61356]: DEBUG nova.compute.manager [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Received event network-changed-1d86f1ba-f3dc-41a9-a77d-630670cb20a7 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 1987.087417] env[61356]: DEBUG nova.compute.manager [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Refreshing instance network info cache due to event network-changed-1d86f1ba-f3dc-41a9-a77d-630670cb20a7. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 1987.087568] env[61356]: DEBUG oslo_concurrency.lockutils [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] Acquiring lock "refresh_cache-887b020c-3652-4724-a0d2-d7413325c5c1" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.087711] env[61356]: DEBUG oslo_concurrency.lockutils [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] Acquired lock "refresh_cache-887b020c-3652-4724-a0d2-d7413325c5c1" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.087870] env[61356]: DEBUG nova.network.neutron [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Refreshing network info cache for port 1d86f1ba-f3dc-41a9-a77d-630670cb20a7 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1987.325952] env[61356]: DEBUG nova.network.neutron [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Updated VIF entry in instance network info cache for port 1d86f1ba-f3dc-41a9-a77d-630670cb20a7. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1987.326329] env[61356]: DEBUG nova.network.neutron [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Updating instance_info_cache with network_info: [{"id": "1d86f1ba-f3dc-41a9-a77d-630670cb20a7", "address": "fa:16:3e:86:fd:93", "network": {"id": "cf4b8e3d-edbd-4eff-a69d-181d9b32fa8e", "bridge": "br-int", "label": "tempest-ServersTestJSON-832994017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f6c093c00e44197ba6affbc1c4964bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d86f1ba-f3", "ovs_interfaceid": "1d86f1ba-f3dc-41a9-a77d-630670cb20a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.335503] env[61356]: DEBUG oslo_concurrency.lockutils [req-890cacd3-56c8-4424-81c7-103801736e55 req-c8859c99-3f86-4ac4-8b1c-2e6bfef98ed7 service nova] Releasing lock "refresh_cache-887b020c-3652-4724-a0d2-d7413325c5c1" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.421517] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.758316] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2011.771081] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.771314] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.771483] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.771637] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2011.772767] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44da883-f36b-4663-81af-6a5047631969 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.781311] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d22867-3610-40e3-8cf0-52085bbc9be6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.794569] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e5d84b-1d55-43c4-b7e7-1c7d74435979 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.800521] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229aa959-be50-4d20-a989-b067c77d229d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.828238] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180686MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2011.828369] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.828543] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.899021] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d6141da0-7822-45dc-902a-c9fc3ee73fef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899021] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 4f103733-7586-4241-a766-cf13bef9e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899021] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899021] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899021] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899021] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899331] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899331] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.899331] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 887b020c-3652-4724-a0d2-d7413325c5c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2011.925322] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2011.925322] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=110GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2012.015967] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c312206e-ecaa-4078-b9a6-d5d499458f95 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.023537] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89af921-b9cd-4bf1-ad61-58b60231c9e9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.053321] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cacce1-5b04-411c-b6e1-55879453b52b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.060795] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcf5add-08ba-423d-bd0f-59a719646be9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.073274] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2012.081575] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2012.097298] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2012.097485] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.269s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.758944] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.759330] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.766473] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2016.754464] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.144309] env[61356]: WARNING oslo_vmware.rw_handles [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.144309] env[61356]: ERROR oslo_vmware.rw_handles [ 2017.146284] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2017.147389] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 
2017.147655] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Copying Virtual Disk [datastore2] vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/9cd0100b-1deb-491e-8e91-54dd754fdbe3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2017.147961] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40b3b908-a8c9-44eb-bffe-4937b8d9ea67 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.155495] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 2017.155495] env[61356]: value = "task-4302676" [ 2017.155495] env[61356]: _type = "Task" [ 2017.155495] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.162858] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.665580] env[61356]: DEBUG oslo_vmware.exceptions [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2017.665866] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.666427] env[61356]: ERROR nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.666427] env[61356]: Faults: ['InvalidArgument'] [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Traceback (most recent call last): [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] yield resources [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self.driver.spawn(context, instance, image_meta, [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self._fetch_image_if_missing(context, vi) [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] image_cache(vi, tmp_image_ds_loc) [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] vm_util.copy_virtual_disk( [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] session._wait_for_task(vmdk_copy_task) [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] return self.wait_for_task(task_ref) [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] return evt.wait() [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] result = hub.switch() [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] return self.greenlet.switch() [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self.f(*self.args, **self.kw) [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] raise exceptions.translate_fault(task_info.error) [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Faults: ['InvalidArgument'] [ 2017.666427] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] [ 2017.667740] env[61356]: INFO nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Terminating instance [ 2017.668342] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.668527] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.668760] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f64497c4-74aa-4023-9333-920e9eff87f0 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.670896] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2017.671097] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2017.671815] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740bcb6a-1cbd-40cb-8300-b4ac4c750e3d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.678382] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2017.678588] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa14692c-1624-4499-b015-5f40d92eb1b7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.680650] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.680823] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2017.681735] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b78f6f59-a6b9-454e-a376-5eea76b03a0a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.686119] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Waiting for the task: (returnval){ [ 2017.686119] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52427405-7f8c-1026-3771-6dd97049983e" [ 2017.686119] env[61356]: _type = "Task" [ 2017.686119] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.693094] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52427405-7f8c-1026-3771-6dd97049983e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.749882] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2017.750170] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2017.750420] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleting the datastore file [datastore2] d6141da0-7822-45dc-902a-c9fc3ee73fef {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2017.750709] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e138eb99-57b3-48a0-9276-5bdb87e92bb7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.757515] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 2017.757515] env[61356]: value = "task-4302678" [ 2017.757515] env[61356]: _type = "Task" [ 2017.757515] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.765672] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302678, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.197103] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2018.197103] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Creating directory with path [datastore2] vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.197103] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdf95410-761b-4e33-966e-35e216c716ed {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.207180] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Created directory with path [datastore2] vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.207392] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Fetch image to [datastore2] vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2018.207576] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2018.208298] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bef3b9f-499a-4b29-af33-cea05553c566 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.214749] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47507c98-3df9-4311-8211-8202f5875ebc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.223489] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e1c9c4-b603-43cf-9955-29317b9a9ccc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.253646] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4a2f5548-6dcb-4255-8c2b-5daaef39de9e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.261205] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-de45455b-efb8-4247-9a1c-3ab9b44ab1a4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.267068] env[61356]: DEBUG oslo_vmware.api [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06449} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.267300] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.267478] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2018.267647] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2018.267835] env[61356]: INFO nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2018.269946] env[61356]: DEBUG nova.compute.claims [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2018.270133] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.270342] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.280890] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2018.332978] env[61356]: DEBUG oslo_vmware.rw_handles [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2018.390951] env[61356]: DEBUG oslo_vmware.rw_handles [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2018.391215] env[61356]: DEBUG oslo_vmware.rw_handles [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2018.468318] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc41558-a280-4eb3-9b7c-1784c83c15e9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.475494] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f964169-fd95-4c0a-9c35-4d5bfaf6c13c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.504988] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1bf972-d9c6-4062-8e67-cf9c4fc857ad {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.511288] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca77c01-e8ea-4d35-b7d3-c573d4692a89 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.523468] env[61356]: DEBUG nova.compute.provider_tree [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.533115] env[61356]: DEBUG nova.scheduler.client.report [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2018.546330] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.276s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.546883] env[61356]: ERROR nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.546883] env[61356]: Faults: ['InvalidArgument'] [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Traceback (most recent call last): [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2018.546883] env[61356]: ERROR 
nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self.driver.spawn(context, instance, image_meta, [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self._fetch_image_if_missing(context, vi) [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] image_cache(vi, tmp_image_ds_loc) [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] vm_util.copy_virtual_disk( [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] session._wait_for_task(vmdk_copy_task) [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] return self.wait_for_task(task_ref) [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] return evt.wait() [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] result = hub.switch() [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] return self.greenlet.switch() [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] self.f(*self.args, **self.kw) [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] raise exceptions.translate_fault(task_info.error) [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Faults: ['InvalidArgument'] [ 2018.546883] env[61356]: ERROR nova.compute.manager [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] [ 2018.547620] env[61356]: DEBUG nova.compute.utils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2018.548959] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Build of instance d6141da0-7822-45dc-902a-c9fc3ee73fef was re-scheduled: A specified parameter was not correct: fileType [ 2018.548959] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2018.549376] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2018.549548] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2018.549720] env[61356]: DEBUG nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2018.549883] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2018.758502] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.758700] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.829771] env[61356]: DEBUG nova.network.neutron [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.844014] env[61356]: INFO nova.compute.manager [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Took 0.29 seconds to deallocate network for instance. 
[ 2018.958028] env[61356]: INFO nova.scheduler.client.report [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleted allocations for instance d6141da0-7822-45dc-902a-c9fc3ee73fef [ 2018.981927] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b598657-82bc-42f7-8d57-d2ea59aa04ad tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 638.600s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.981927] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 442.370s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.981927] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "d6141da0-7822-45dc-902a-c9fc3ee73fef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.982507] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.982507] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.984274] env[61356]: INFO nova.compute.manager [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Terminating instance [ 2018.985891] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.986121] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.986326] env[61356]: DEBUG nova.network.neutron [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2019.038544] env[61356]: DEBUG nova.network.neutron [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2019.489552] env[61356]: DEBUG nova.network.neutron [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.498372] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "refresh_cache-d6141da0-7822-45dc-902a-c9fc3ee73fef" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.498755] env[61356]: DEBUG nova.compute.manager [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2019.498946] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2019.499533] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b25b45fa-cc1d-4c41-89aa-69ac65f48e67 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.508841] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b900fdc-2465-4f23-9b2a-d772f8520d9a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.536681] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d6141da0-7822-45dc-902a-c9fc3ee73fef could not be found. 
[ 2019.536882] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2019.537080] env[61356]: INFO nova.compute.manager [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2019.537323] env[61356]: DEBUG oslo.service.loopingcall [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.537538] env[61356]: DEBUG nova.compute.manager [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2019.537630] env[61356]: DEBUG nova.network.neutron [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2019.554654] env[61356]: DEBUG nova.network.neutron [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2019.564926] env[61356]: DEBUG nova.network.neutron [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.572938] env[61356]: INFO nova.compute.manager [-] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] Took 0.04 seconds to deallocate network for instance. [ 2019.665172] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d2c52793-5aa7-42d8-ac38-af4a97a780e0 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.683s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.666049] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 294.632s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.666408] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d6141da0-7822-45dc-902a-c9fc3ee73fef] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2019.666604] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "d6141da0-7822-45dc-902a-c9fc3ee73fef" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.758104] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.758281] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2021.758114] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.758936] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2022.759261] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2022.759295] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2022.777652] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.777795] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.777926] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.778068] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.778198] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.778321] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.778438] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.778558] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2022.778676] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2030.758389] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.758676] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2030.768332] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] There are 0 instances to clean {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2040.758287] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.758649] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances with incomplete migration {{(pid=61356) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2066.110949] env[61356]: WARNING oslo_vmware.rw_handles [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2066.110949] env[61356]: ERROR oslo_vmware.rw_handles [ 2066.111503] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2066.113210] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2066.113463] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Copying Virtual Disk [datastore2] vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/d2c74732-e9b5-4f28-b422-0c92466a52c5/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2066.113759] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbff2135-0362-4ed5-8df1-0ec05efecb43 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.122945] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Waiting for the task: (returnval){ [ 2066.122945] env[61356]: value = "task-4302679" [ 2066.122945] env[61356]: _type = "Task" [ 2066.122945] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.130797] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Task: {'id': task-4302679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.633588] env[61356]: DEBUG oslo_vmware.exceptions [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2066.633869] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.634454] env[61356]: ERROR nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.634454] env[61356]: Faults: ['InvalidArgument'] [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] Traceback (most recent call last): [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] yield resources [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self.driver.spawn(context, instance, image_meta, [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self._fetch_image_if_missing(context, vi) [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] image_cache(vi, tmp_image_ds_loc) [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] vm_util.copy_virtual_disk( [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] session._wait_for_task(vmdk_copy_task) [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] return self.wait_for_task(task_ref) [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] return evt.wait() [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] result = hub.switch() [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] return self.greenlet.switch() [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self.f(*self.args, **self.kw) [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] raise exceptions.translate_fault(task_info.error) [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] Faults: ['InvalidArgument'] [ 2066.634454] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] [ 2066.635360] env[61356]: INFO nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Terminating instance [ 2066.636987] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.636987] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2066.636987] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-249a5b9e-9d2f-4119-8dda-d64acbad3420 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.639045] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2066.639279] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2066.639996] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea03e876-d41c-4cd4-b6e2-c61ebd982ba3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.646897] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2066.647139] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b87532cb-0b93-49aa-8179-f2040c851bea {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.649496] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2066.649705] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2066.650657] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-875aa0f0-7eda-409f-88eb-a248e13982ee {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.655777] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 2066.655777] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c09a58-ade2-24eb-1eb9-d826bde24dff" [ 2066.655777] env[61356]: _type = "Task" [ 2066.655777] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.663248] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52c09a58-ade2-24eb-1eb9-d826bde24dff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.715910] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2066.716149] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2066.716331] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Deleting the datastore file [datastore2] 4f103733-7586-4241-a766-cf13bef9e435 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.716593] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-499449f4-2b7c-40fc-a083-c21ed43adc05 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.722921] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Waiting for the task: (returnval){ [ 2066.722921] env[61356]: value = "task-4302681" [ 2066.722921] env[61356]: _type = "Task" [ 2066.722921] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.730233] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Task: {'id': task-4302681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.165981] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2067.166379] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating directory with path [datastore2] vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2067.166619] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4589e6b8-51f2-46e2-8727-e4efadbbfa4f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.177249] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Created directory with path [datastore2] vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2067.177456] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Fetch image to [datastore2] vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2067.177635] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2067.178355] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61cdfd9-bd35-4bec-a982-06210566c7a3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.184439] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd319e84-63d9-4c49-9a39-d199c16f6926 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.192877] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6578b0-07f0-41de-b989-4fdf5f9590a4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.222287] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a3c824d2-1417-414e-8df6-b12cd531ff80 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.233651] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b0ae7251-8261-4d0e-bfe6-999a310c2110 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.235335] env[61356]: DEBUG oslo_vmware.api [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Task: {'id': task-4302681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064933} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.235574] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.235756] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2067.235925] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2067.236111] env[61356]: INFO nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Took 0.60 seconds to destroy the instance on the hypervisor. 
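The wait_for_task / _poll_task entries above are the visible surface of a simple pattern: submit a vCenter task, then poll its state until it reaches success or error. A minimal sketch of that loop, assuming a simplified TaskInfo shape and a fixed poll interval (the real oslo.vmware loop runs under a loopingcall and raises a translated Vim fault on error):

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    state: str                  # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: Optional[str] = None

def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  interval: float = 0.5) -> TaskInfo:
    """Poll a vCenter-style task until it reaches a terminal state."""
    while True:
        info = fetch_task_info()
        if info.state == 'success':
            return info                     # caller reads duration_secs etc.
        if info.state == 'error':
            # oslo.vmware raises a translated Vim fault here instead
            raise RuntimeError(info.error)
        print(f"progress is {info.progress}%")   # cf. the _poll_task lines
        time.sleep(interval)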
[ 2067.238268] env[61356]: DEBUG nova.compute.claims [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2067.238457] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.239181] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.255128] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2067.311116] env[61356]: DEBUG oslo_vmware.rw_handles [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2067.370074] env[61356]: DEBUG oslo_vmware.rw_handles [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2067.370214] env[61356]: DEBUG oslo_vmware.rw_handles [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2067.453233] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4389e7-9df8-494b-a6ef-293d986125a4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.460570] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6bf11d-21db-4ab3-96da-39f709a3d17e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.489117] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f71adc-de47-4fd7-9095-ce8099928c97 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.495448] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323f4592-df3c-441b-8c10-ce6d682930c2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.507698] env[61356]: DEBUG nova.compute.provider_tree [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.516080] env[61356]: DEBUG nova.scheduler.client.report [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2067.528965] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.290s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.529624] env[61356]: ERROR nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.529624] env[61356]: Faults: ['InvalidArgument'] [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] Traceback (most recent call last): [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2067.529624] 
env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self.driver.spawn(context, instance, image_meta, [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self._fetch_image_if_missing(context, vi) [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] image_cache(vi, tmp_image_ds_loc) [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] vm_util.copy_virtual_disk( [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] session._wait_for_task(vmdk_copy_task) [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] return self.wait_for_task(task_ref) [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] return evt.wait() [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] result = hub.switch() [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] return self.greenlet.switch() [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] self.f(*self.args, **self.kw) [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] raise exceptions.translate_fault(task_info.error) [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] Faults: ['InvalidArgument'] [ 2067.529624] env[61356]: ERROR nova.compute.manager [instance: 4f103733-7586-4241-a766-cf13bef9e435] [ 2067.530452] env[61356]: DEBUG nova.compute.utils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2067.531725] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Build of instance 4f103733-7586-4241-a766-cf13bef9e435 was re-scheduled: A specified parameter was not correct: fileType [ 2067.531725] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2067.532114] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2067.532284] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2067.532452] env[61356]: DEBUG nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2067.532615] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.013033] env[61356]: DEBUG nova.network.neutron [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.023768] env[61356]: INFO nova.compute.manager [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Took 0.49 seconds to deallocate network for instance. [ 2068.112888] env[61356]: INFO nova.scheduler.client.report [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Deleted allocations for instance 4f103733-7586-4241-a766-cf13bef9e435 [ 2068.146184] env[61356]: DEBUG oslo_concurrency.lockutils [None req-634cd363-f8ce-41d2-9834-cae23ca95f4d tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "4f103733-7586-4241-a766-cf13bef9e435" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 631.319s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.146529] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "4f103733-7586-4241-a766-cf13bef9e435" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 435.105s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.146770] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Acquiring lock "4f103733-7586-4241-a766-cf13bef9e435-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.147033] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "4f103733-7586-4241-a766-cf13bef9e435-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.147764] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "4f103733-7586-4241-a766-cf13bef9e435-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.149747] env[61356]: INFO nova.compute.manager [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Terminating instance [ 2068.151774] env[61356]: DEBUG nova.compute.manager [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2068.151994] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2068.152565] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6dc358e5-fa0c-43cb-9ad0-c2cf03bf4e4d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.162037] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6895878-f348-496f-96f7-271060c6b92a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.189895] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4f103733-7586-4241-a766-cf13bef9e435 could not be found. [ 2068.190199] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2068.190313] env[61356]: INFO nova.compute.manager [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2068.190571] env[61356]: DEBUG oslo.service.loopingcall [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.190803] env[61356]: DEBUG nova.compute.manager [-] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2068.190900] env[61356]: DEBUG nova.network.neutron [-] [instance: 4f103733-7586-4241-a766-cf13bef9e435] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.214233] env[61356]: DEBUG nova.network.neutron [-] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.222293] env[61356]: INFO nova.compute.manager [-] [instance: 4f103733-7586-4241-a766-cf13bef9e435] Took 0.03 seconds to deallocate network for instance. [ 2068.306751] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7a694b5d-7e7f-4714-9022-a02f3dc14f85 tempest-ServersNegativeTestJSON-2075142593 tempest-ServersNegativeTestJSON-2075142593-project-member] Lock "4f103733-7586-4241-a766-cf13bef9e435" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.160s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.307675] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "4f103733-7586-4241-a766-cf13bef9e435" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 343.274s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.308410] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 4f103733-7586-4241-a766-cf13bef9e435] During sync_power_state the instance has a pending task (deleting). Skip.
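The _sync_power_states entries above show the periodic sync taking the per-instance lock and then bailing out because the instance still has a task in flight: syncing a power state mid-operation could clobber the operation's own state transitions. A sketch of that guard, using a hypothetical Instance record rather than nova's actual model:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Instance:
    uuid: str
    task_state: Optional[str]   # e.g. 'deleting' while a delete is running

def query_driver_power_state_and_sync(instance: Instance,
                                      driver_power_state: str) -> None:
    if instance.task_state is not None:
        # An in-flight operation owns this instance; skip the sync rather
        # than race it, mirroring the "pending task (deleting). Skip." line.
        print(f"[instance: {instance.uuid}] During sync_power_state the "
              f"instance has a pending task ({instance.task_state}). Skip.")
        return
    # ...otherwise reconcile the database power state with driver_power_state.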
[ 2068.308410] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "4f103733-7586-4241-a766-cf13bef9e435" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.768071] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.780569] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.780802] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2073.780973] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2073.781149] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2073.782274] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7e1df4-3c61-4003-9065-9a8ca2e689cd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.790948] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b86062-2aaa-43e1-b90a-de607bfd7dc3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.804020] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dff425-54cc-422d-8830-1b27b02e0e08 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.809867] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aff802c-7943-44ff-b51a-c5cd2a250d8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.838615] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180684MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2073.839042] env[61356]:
DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.839042] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2073.904686] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.904860] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.904988] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.905130] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.905251] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.905378] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.905485] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 887b020c-3652-4724-a0d2-d7413325c5c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2073.905663] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2073.905798] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=110GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2073.921641] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing inventories for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2073.935703] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Updating ProviderTree inventory for provider cfb833eb-84f8-480d-866a-1ad2fb871484 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2073.935884] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Updating inventory in ProviderTree for provider cfb833eb-84f8-480d-866a-1ad2fb871484 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2073.946283] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing aggregate associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, aggregates: None {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2073.966369] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Refreshing trait associations for resource provider cfb833eb-84f8-480d-866a-1ad2fb871484, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61356) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2074.047520] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e429535-a5f4-4cfc-abe9-eb392357b17a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.054790] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-516318d8-bafd-4811-8325-a4af54b5faf2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.083862] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecbbc2f-9aa6-4db2-91db-99edaeb089a0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.090480] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e29e2a-77e5-4da5-b30d-8c94b8f2d8c6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.103360] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2074.113176] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2074.126724] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2074.126896] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.288s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.119160] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.753552] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.757152] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.758127] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.758068] env[61356]: 
DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.758838] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.759147] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2083.759379] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.759786] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2083.759786] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2083.776995] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777160] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777287] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777414] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777539] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777700] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777826] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2083.777946] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2083.778470] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.772623] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.342064] env[61356]: WARNING oslo_vmware.rw_handles [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2115.342064] env[61356]: ERROR oslo_vmware.rw_handles [ 2115.342064] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2115.344059] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: 
bc3bcf07-24bf-4cf6-874e-69e24b615458] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2115.344305] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Copying Virtual Disk [datastore2] vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/598b54e9-a36c-4c12-b8b9-34adb120c5b8/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2115.344611] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a66a296-1abd-45d8-af5e-fa08982f2b58 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.352562] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 2115.352562] env[61356]: value = "task-4302682" [ 2115.352562] env[61356]: _type = "Task" [ 2115.352562] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.360083] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': task-4302682, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.862883] env[61356]: DEBUG oslo_vmware.exceptions [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2115.863192] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.863748] env[61356]: ERROR nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2115.863748] env[61356]: Faults: ['InvalidArgument'] [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Traceback (most recent call last): [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] yield resources [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self.driver.spawn(context, instance, image_meta, [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self._fetch_image_if_missing(context, vi) [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] image_cache(vi, tmp_image_ds_loc) [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] vm_util.copy_virtual_disk( [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] session._wait_for_task(vmdk_copy_task) [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] return self.wait_for_task(task_ref) [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] return evt.wait() [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] result = hub.switch() [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] return self.greenlet.switch() [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self.f(*self.args, **self.kw) [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] raise exceptions.translate_fault(task_info.error) [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Faults: ['InvalidArgument'] [ 2115.863748] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] [ 2115.864523] env[61356]: INFO nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Terminating instance [ 2115.865716] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.865937] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2115.866210] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b6f5ed17-418f-49da-a5d2-3400aa32b5c3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.868591] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2115.868781] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2115.869510] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ce8493-2ebf-4495-afd6-8ef7ddf73097 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.876214] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2115.876442] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76f6a046-d505-435b-9ec8-6c09a7645c9b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.878622] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2115.878801] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2115.879784] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e3e1c52-df43-4e24-81b3-2faace2f5875 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.884436] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 2115.884436] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a1af21-fadf-77e3-a2a9-9995e291f542" [ 2115.884436] env[61356]: _type = "Task" [ 2115.884436] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.896709] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a1af21-fadf-77e3-a2a9-9995e291f542, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.946976] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2115.947193] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2115.947468] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Deleting the datastore file [datastore2] bc3bcf07-24bf-4cf6-874e-69e24b615458 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2115.947824] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d240f19-08a9-4213-9c34-e048f1846c6b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.954060] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 2115.954060] env[61356]: value = "task-4302684" [ 2115.954060] env[61356]: _type = "Task" [ 2115.954060] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.961293] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': task-4302684, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.394911] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2116.395281] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating directory with path [datastore2] vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2116.395326] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cf409a5-6d97-4e5d-ade0-60ac8cf47e6f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.406162] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Created directory with path [datastore2] vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2116.406343] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Fetch image to [datastore2] vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2116.406513] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2116.407209] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ada7ada-fd17-4831-a530-d76077939caf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.414434] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bf8c79-a306-4f27-a34f-f1003239e183 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.423490] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43b8ef2-7c9d-4827-bac7-e70d304444d9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.452851] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2c25ed7e-6988-4a2e-a6be-3df54e0eafdd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.463076] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-48df1a86-6621-47f0-8d9c-841ff58bba2e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.464650] env[61356]: DEBUG oslo_vmware.api [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': task-4302684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067292} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.464891] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2116.465079] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2116.465256] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2116.465427] env[61356]: INFO nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Took 0.60 seconds to destroy the instance on the hypervisor. 
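The spawn-failure traceback above ends in the same place both times it appears in this excerpt: oslo.vmware polls the CopyVirtualDisk_Task inside a looping call, and once the task lands in an error state, `_poll_task` re-raises the server-side fault client-side via `exceptions.translate_fault(task_info.error)`. A minimal, self-contained sketch of that poll-and-translate pattern follows; the names (`wait_for_task`, `poll`, the two-state task model) are plain-Python stand-ins for illustration, not the actual oslo.vmware API.

```python
# Simplified model (NOT the real oslo.vmware code) of the polling loop
# visible in the traceback: keep polling the task until it leaves the
# running state, then either return on success or re-raise the fault.
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list


def wait_for_task(poll, interval=0.5):
    """Poll `poll()` -> (state, error) until the task finishes."""
    while True:
        state, error = poll()
        if state == 'success':
            return
        if state == 'error':
            # mirrors: raise exceptions.translate_fault(task_info.error)
            raise VimFaultException(['InvalidArgument'], error)
        time.sleep(interval)


# Example: a task that fails the way CopyVirtualDisk_Task does here.
attempts = iter([('running', None),
                 ('error', 'A specified parameter was not correct: fileType')])
try:
    wait_for_task(lambda: next(attempts), interval=0.01)
except VimFaultException as exc:
    print(exc.fault_list, exc)   # ['InvalidArgument'] A specified parameter ...
```

In the real driver this exception propagates up through `session._wait_for_task` into `_build_and_run_instance`, which is why the compute manager then terminates the half-built instance, as the surrounding records show.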
[ 2116.467544] env[61356]: DEBUG nova.compute.claims [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2116.467719] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.467931] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.484892] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2116.536881] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2116.595958] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2116.596262] env[61356]: DEBUG oslo_vmware.rw_handles [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2116.654034] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53164d80-4e51-43a8-ac49-f4dd83507198 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.661551] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4786bf-fd9f-409c-820e-ef76e6f91729 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.691262] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731609a7-c5a6-4461-a5ee-f3673b90cf99 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.697981] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e6dcaa-c96d-4745-b207-c4e9cc55ffc4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.710553] env[61356]: DEBUG nova.compute.provider_tree [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2116.720378] env[61356]: DEBUG nova.scheduler.client.report [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2116.734263] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.266s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.734778] env[61356]: ERROR nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2116.734778] env[61356]: Faults: ['InvalidArgument'] [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Traceback (most recent call last): [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2116.734778] 
env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self.driver.spawn(context, instance, image_meta, [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self._fetch_image_if_missing(context, vi) [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] image_cache(vi, tmp_image_ds_loc) [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] vm_util.copy_virtual_disk( [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] session._wait_for_task(vmdk_copy_task) [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] return self.wait_for_task(task_ref) [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] return evt.wait() [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] result = hub.switch() [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] return self.greenlet.switch() [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] self.f(*self.args, **self.kw) [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] raise exceptions.translate_fault(task_info.error) [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Faults: ['InvalidArgument'] [ 2116.734778] env[61356]: ERROR nova.compute.manager [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] [ 2116.735584] env[61356]: DEBUG nova.compute.utils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2116.736840] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Build of instance bc3bcf07-24bf-4cf6-874e-69e24b615458 was re-scheduled: A specified parameter was not correct: fileType [ 2116.736840] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2116.737238] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2116.737413] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2116.737566] env[61356]: DEBUG nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2116.737736] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2117.243919] env[61356]: DEBUG nova.network.neutron [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.259739] env[61356]: INFO nova.compute.manager [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Took 0.52 seconds to deallocate network for instance. [ 2117.376373] env[61356]: INFO nova.scheduler.client.report [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Deleted allocations for instance bc3bcf07-24bf-4cf6-874e-69e24b615458 [ 2117.405254] env[61356]: DEBUG oslo_concurrency.lockutils [None req-093da620-7820-4ff9-902e-9fe2fff092d5 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.405s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.405678] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 392.371s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.405755] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] During sync_power_state the instance has a pending task (spawning). Skip. 
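The lockutils records around this point ("acquired ... waited 392.371s", "released ... held 573.405s") come from a wrapper that timestamps a named lock around its critical section and logs both the wait time and the hold time. Below is a simplified stand-in for that bookkeeping, assuming a plain in-process threading.Lock; the real oslo.concurrency implementation additionally supports fair locks and external file-based locks, which this sketch does not attempt.

```python
# Hedged sketch of the "waited Ns / held Ns" lock bookkeeping seen in
# these records (illustrative, not oslo_concurrency.lockutils itself).
import threading
import time
from contextlib import contextmanager

_locks = {}


@contextmanager
def logged_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()                       # may block behind other holders
    print(f'Lock "{name}" acquired by "{caller}" :: waited '
          f'{time.monotonic() - start:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held '
              f'{time.monotonic() - held_from:.3f}s')


with logged_lock("compute_resources",
                 "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
    time.sleep(0.1)   # stand-in for the resource-claim critical section
```

The very long waited/held values in the log are a consequence of the same instance lock being serialized across the build, power-state sync, and terminate code paths, exactly the sequence these records trace.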
[ 2117.405997] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.406477] env[61356]: DEBUG oslo_concurrency.lockutils [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 180.386s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.406724] env[61356]: DEBUG oslo_concurrency.lockutils [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "bc3bcf07-24bf-4cf6-874e-69e24b615458-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.406955] env[61356]: DEBUG oslo_concurrency.lockutils [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.407142] env[61356]: DEBUG oslo_concurrency.lockutils [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.409384] env[61356]: INFO nova.compute.manager [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Terminating instance [ 2117.411393] env[61356]: DEBUG nova.compute.manager [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2117.411591] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2117.411861] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-143db6c3-f90b-4fac-a49d-4b48228df1af {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.421723] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04773199-1698-4011-acdb-7d8f661007ed {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.451191] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bc3bcf07-24bf-4cf6-874e-69e24b615458 could not be found. [ 2117.451410] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2117.451594] env[61356]: INFO nova.compute.manager [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2117.451850] env[61356]: DEBUG oslo.service.loopingcall [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2117.452084] env[61356]: DEBUG nova.compute.manager [-] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2117.452183] env[61356]: DEBUG nova.network.neutron [-] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2117.499131] env[61356]: DEBUG nova.network.neutron [-] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.506918] env[61356]: INFO nova.compute.manager [-] [instance: bc3bcf07-24bf-4cf6-874e-69e24b615458] Took 0.05 seconds to deallocate network for instance. 
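The terminate path just logged is deliberately idempotent: SearchIndex.FindAllByUuid comes back empty, the driver logs "Instance does not exist on backend" as a warning rather than an error, still reports the instance destroyed, and network deallocation runs regardless. A hedged sketch of that control flow follows; the function names are illustrative, not Nova's internal API.

```python
# Illustrative model of the idempotent destroy behaviour in the records
# above: a missing backend VM is tolerated and teardown continues.
class InstanceNotFound(Exception):
    pass


def destroy_instance(find_vm, unregister, deallocate_network, uuid):
    try:
        vm_ref = find_vm(uuid)
        unregister(vm_ref)
    except InstanceNotFound:
        # mirrors the WARNING "Instance does not exist on backend"
        print(f"Instance does not exist on backend: {uuid}; continuing")
    # runs whether or not the VM was found, as in the log above
    deallocate_network(uuid)


def _missing(uuid):
    raise InstanceNotFound(uuid)


destroy_instance(_missing,
                 lambda ref: None,
                 lambda uuid: print(f"deallocated network for {uuid}"),
                 "bc3bcf07-24bf-4cf6-874e-69e24b615458")
```

This is why the second terminate of bc3bcf07-24bf-4cf6-874e-69e24b615458 completes in a fraction of a second: there is nothing left on the hypervisor, so only the network and bookkeeping steps do real work.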
[ 2117.590235] env[61356]: DEBUG oslo_concurrency.lockutils [None req-486221dd-7d3a-41df-8fe0-f8fec324598c tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "bc3bcf07-24bf-4cf6-874e-69e24b615458" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.630478] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquiring lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.630806] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.642242] env[61356]: DEBUG nova.compute.manager [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2123.691846] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.692104] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.693556] env[61356]: INFO nova.compute.claims [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2123.818876] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1682a4-fd3c-48ca-a79a-acadb3fefc8c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.826447] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148c975f-5360-48cb-8e6b-ff4762b317da {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.855371] env[61356]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35358d7f-1968-4635-8dc5-dfce4fde3c0b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.862011] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515f5c58-c8a1-476a-9ccb-a5ad6510592c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.875801] env[61356]: DEBUG nova.compute.provider_tree [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2123.884324] env[61356]: DEBUG nova.scheduler.client.report [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2123.897166] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.205s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.897622] env[61356]: DEBUG nova.compute.manager [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2123.933637] env[61356]: DEBUG nova.compute.utils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2123.934974] env[61356]: DEBUG nova.compute.manager [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Allocating IP information in the background. 
{{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2123.935072] env[61356]: DEBUG nova.network.neutron [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2123.946018] env[61356]: DEBUG nova.compute.manager [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2124.011076] env[61356]: DEBUG nova.policy [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1d414c0d56c4231b98fe9ef949b2bfc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff0c87c9d22d4f038e932528a4137514', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 2124.015861] env[61356]: DEBUG nova.compute.manager [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2124.040156] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2124.040421] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2124.040590] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2124.040780] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2124.041171] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2124.041171] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2124.041279] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2124.041434] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2124.041604] env[61356]: DEBUG nova.virt.hardware [None 
req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2124.041770] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2124.041990] env[61356]: DEBUG nova.virt.hardware [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2124.042854] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243186a5-7c11-40a9-9d02-8ae6c92ee32b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.053135] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ee17c9-d865-426a-a14f-b21405bd4196 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.367757] env[61356]: DEBUG nova.network.neutron [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Successfully created port: 373635ce-1d0e-4120-bfcb-d2d9764672d3 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2125.112552] env[61356]: DEBUG nova.compute.manager [req-3ff25fc9-7747-48b9-b89b-b929017080a0 req-d7a065b4-4911-4a9f-8aa7-0c476eea06e9 service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Received event network-vif-plugged-373635ce-1d0e-4120-bfcb-d2d9764672d3 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2125.112812] env[61356]: DEBUG oslo_concurrency.lockutils [req-3ff25fc9-7747-48b9-b89b-b929017080a0 req-d7a065b4-4911-4a9f-8aa7-0c476eea06e9 service nova] Acquiring lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.112982] env[61356]: DEBUG oslo_concurrency.lockutils [req-3ff25fc9-7747-48b9-b89b-b929017080a0 req-d7a065b4-4911-4a9f-8aa7-0c476eea06e9 service nova] Lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.113219] env[61356]: DEBUG oslo_concurrency.lockutils [req-3ff25fc9-7747-48b9-b89b-b929017080a0 req-d7a065b4-4911-4a9f-8aa7-0c476eea06e9 service nova] Lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.113395] env[61356]: DEBUG nova.compute.manager 
[req-3ff25fc9-7747-48b9-b89b-b929017080a0 req-d7a065b4-4911-4a9f-8aa7-0c476eea06e9 service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] No waiting events found dispatching network-vif-plugged-373635ce-1d0e-4120-bfcb-d2d9764672d3 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2125.113559] env[61356]: WARNING nova.compute.manager [req-3ff25fc9-7747-48b9-b89b-b929017080a0 req-d7a065b4-4911-4a9f-8aa7-0c476eea06e9 service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Received unexpected event network-vif-plugged-373635ce-1d0e-4120-bfcb-d2d9764672d3 for instance with vm_state building and task_state spawning. [ 2125.202711] env[61356]: DEBUG nova.network.neutron [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Successfully updated port: 373635ce-1d0e-4120-bfcb-d2d9764672d3 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2125.213540] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquiring lock "refresh_cache-55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.213696] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquired lock "refresh_cache-55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.213853] env[61356]: DEBUG nova.network.neutron [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2125.263520] env[61356]: DEBUG nova.network.neutron [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2125.427553] env[61356]: DEBUG nova.network.neutron [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Updating instance_info_cache with network_info: [{"id": "373635ce-1d0e-4120-bfcb-d2d9764672d3", "address": "fa:16:3e:93:36:94", "network": {"id": "75d58660-0cd0-4543-96e0-4c4ba0189299", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-429688229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff0c87c9d22d4f038e932528a4137514", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap373635ce-1d", "ovs_interfaceid": "373635ce-1d0e-4120-bfcb-d2d9764672d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2125.438403] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Releasing lock "refresh_cache-55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.438668] env[61356]: DEBUG nova.compute.manager [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Instance network_info: |[{"id": "373635ce-1d0e-4120-bfcb-d2d9764672d3", "address": "fa:16:3e:93:36:94", "network": {"id": "75d58660-0cd0-4543-96e0-4c4ba0189299", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-429688229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff0c87c9d22d4f038e932528a4137514", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap373635ce-1d", "ovs_interfaceid": "373635ce-1d0e-4120-bfcb-d2d9764672d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2125.439068] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:36:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '373635ce-1d0e-4120-bfcb-d2d9764672d3', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2125.446422] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Creating folder: Project (ff0c87c9d22d4f038e932528a4137514). Parent ref: group-v846926. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2125.446908] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab4a1f64-5252-471a-a6d1-3b26a93f1a1c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.458619] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Created folder: Project (ff0c87c9d22d4f038e932528a4137514) in parent group-v846926. [ 2125.458795] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Creating folder: Instances. Parent ref: group-v847042. {{(pid=61356) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2125.459017] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b06e09f-5164-4803-8888-c4588fd5533a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.466457] env[61356]: INFO nova.virt.vmwareapi.vm_util [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Created folder: Instances in parent group-v847042. [ 2125.466675] env[61356]: DEBUG oslo.service.loopingcall [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2125.466848] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2125.467040] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84ef2047-6979-411d-8fe2-a55c6607084d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.485761] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2125.485761] env[61356]: value = "task-4302687" [ 2125.485761] env[61356]: _type = "Task" [ 2125.485761] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.492748] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302687, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.997552] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302687, 'name': CreateVM_Task, 'duration_secs': 0.300219} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.997686] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2125.998457] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.998617] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.998965] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2125.999255] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa616659-f6a0-4d8f-89af-517100387794 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.003716] env[61356]: DEBUG oslo_vmware.api [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Waiting for the task: (returnval){ [ 2126.003716] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52779544-26bb-cb09-bc20-c305492722a7" [ 2126.003716] env[61356]: _type = "Task" [ 2126.003716] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.013639] env[61356]: DEBUG oslo_vmware.api [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52779544-26bb-cb09-bc20-c305492722a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.514233] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.514594] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2126.514693] env[61356]: DEBUG oslo_concurrency.lockutils [None req-2b79d97a-5920-4711-9114-743a63fcf925 tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.188545] env[61356]: DEBUG nova.compute.manager [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Received event network-changed-373635ce-1d0e-4120-bfcb-d2d9764672d3 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2127.188798] env[61356]: DEBUG nova.compute.manager [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Refreshing instance network info cache due to event network-changed-373635ce-1d0e-4120-bfcb-d2d9764672d3. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2127.189448] env[61356]: DEBUG oslo_concurrency.lockutils [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] Acquiring lock "refresh_cache-55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.190016] env[61356]: DEBUG oslo_concurrency.lockutils [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] Acquired lock "refresh_cache-55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2127.190261] env[61356]: DEBUG nova.network.neutron [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Refreshing network info cache for port 373635ce-1d0e-4120-bfcb-d2d9764672d3 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2127.430330] env[61356]: DEBUG nova.network.neutron [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Updated VIF entry in instance network info cache for port 373635ce-1d0e-4120-bfcb-d2d9764672d3. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2127.430684] env[61356]: DEBUG nova.network.neutron [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Updating instance_info_cache with network_info: [{"id": "373635ce-1d0e-4120-bfcb-d2d9764672d3", "address": "fa:16:3e:93:36:94", "network": {"id": "75d58660-0cd0-4543-96e0-4c4ba0189299", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-429688229-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff0c87c9d22d4f038e932528a4137514", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap373635ce-1d", "ovs_interfaceid": "373635ce-1d0e-4120-bfcb-d2d9764672d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2127.440293] env[61356]: DEBUG oslo_concurrency.lockutils [req-cbda27e8-a3a0-4ebd-9e73-fd99f697abdf req-c6d2bc84-5ab9-435b-8402-23f9564f813b service nova] Releasing lock "refresh_cache-55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.758336] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.758720] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.769844] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.770061] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.770239] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.770394] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2135.771476] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0142a3-7e31-4eba-8b43-3f2130cd07ac {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.779750] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8e5441-f6f0-4132-85e6-67a9cee1273a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.792876] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a9fab7-07d8-4770-b1b5-232fdb22f5f3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.798746] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8676110b-9ad1-47f4-a7d4-689ffb43d7ef {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.827521] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180690MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2135.827665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.827833] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.899635] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.899801] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.899930] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.900068] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.900192] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.900310] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 887b020c-3652-4724-a0d2-d7413325c5c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.900427] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2135.900604] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2135.900770] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=110GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2135.988922] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff14e7cf-962d-4085-938b-100f7d4b6947 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.996113] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3d81f8-1f47-4a7e-ad31-b53e49e3b213 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.025013] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe8f721-fd55-4cfe-b508-b384b7e9d062 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.031980] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594dc7f1-b4c4-49e9-9cdd-fbe9d247d028 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.044521] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2136.052433] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2136.066532] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2136.066764] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.239s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.063130] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.759630] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.757654] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.757895] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.758628] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.758890] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2144.758637] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.758919] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2144.759146] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2144.777662] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.777822] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.777955] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.778097] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.778224] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.778344] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.778498] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2144.778627] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2145.358385] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "f2627e8b-2c0b-4cba-9920-98ac3686c919" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.358644] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "f2627e8b-2c0b-4cba-9920-98ac3686c919" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.369618] env[61356]: DEBUG nova.compute.manager [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Starting instance... 
{{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2145.416992] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.417254] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.418971] env[61356]: INFO nova.compute.claims [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2145.556622] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ede8167-dc26-4d1b-b0df-061cc45648ba {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.564164] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ff9136-ad8d-4639-8d44-0059f2c65f5c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.592509] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94166fd-9edb-4e94-ba2c-4aade272d897 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.599271] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e038912-7d80-401c-aa50-d1bd98f5aef6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.613073] env[61356]: DEBUG nova.compute.provider_tree [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2145.621363] env[61356]: DEBUG nova.scheduler.client.report [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2145.634747] env[61356]: DEBUG oslo_concurrency.lockutils 
[None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.217s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.635195] env[61356]: DEBUG nova.compute.manager [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2145.665296] env[61356]: DEBUG nova.compute.utils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2145.666478] env[61356]: DEBUG nova.compute.manager [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2145.666639] env[61356]: DEBUG nova.network.neutron [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2145.678718] env[61356]: DEBUG nova.compute.manager [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Start building block device mappings for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2145.735664] env[61356]: DEBUG nova.policy [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89c2ea569ffb416eb170549aaebb704a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fe9dfd77dff47cab342d5ebb764e770', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 2145.746853] env[61356]: DEBUG nova.compute.manager [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Start spawning the instance on the hypervisor. 
{{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2145.757747] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.775343] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=<?>,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-06-27T18:40:20Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2145.775728] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2145.775774] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2145.775963] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2145.776193] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2145.776313] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2145.776577] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2145.776774] env[61356]: DEBUG nova.virt.hardware [None 
req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2145.776957] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2145.777145] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2145.777341] env[61356]: DEBUG nova.virt.hardware [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2145.778264] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4198b0-1d42-4d8a-9fb7-58ea1e1121da {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.786580] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1dda970-5239-473c-b69c-52af48209ff3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.279363] env[61356]: DEBUG nova.network.neutron [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Successfully created port: e54abdc1-f9bf-40d4-9ea5-6e969980fa31 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2146.944114] env[61356]: DEBUG nova.compute.manager [req-decf388e-9932-40fc-80d9-399729620ff0 req-7978a409-74b3-4ac4-8df9-beb86013de58 service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Received event network-vif-plugged-e54abdc1-f9bf-40d4-9ea5-6e969980fa31 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2146.944388] env[61356]: DEBUG oslo_concurrency.lockutils [req-decf388e-9932-40fc-80d9-399729620ff0 req-7978a409-74b3-4ac4-8df9-beb86013de58 service nova] Acquiring lock "f2627e8b-2c0b-4cba-9920-98ac3686c919-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.944968] env[61356]: DEBUG oslo_concurrency.lockutils [req-decf388e-9932-40fc-80d9-399729620ff0 req-7978a409-74b3-4ac4-8df9-beb86013de58 service nova] Lock "f2627e8b-2c0b-4cba-9920-98ac3686c919-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.945165] env[61356]: DEBUG oslo_concurrency.lockutils [req-decf388e-9932-40fc-80d9-399729620ff0 req-7978a409-74b3-4ac4-8df9-beb86013de58 
service nova] Lock "f2627e8b-2c0b-4cba-9920-98ac3686c919-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.945342] env[61356]: DEBUG nova.compute.manager [req-decf388e-9932-40fc-80d9-399729620ff0 req-7978a409-74b3-4ac4-8df9-beb86013de58 service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] No waiting events found dispatching network-vif-plugged-e54abdc1-f9bf-40d4-9ea5-6e969980fa31 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2146.945503] env[61356]: WARNING nova.compute.manager [req-decf388e-9932-40fc-80d9-399729620ff0 req-7978a409-74b3-4ac4-8df9-beb86013de58 service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Received unexpected event network-vif-plugged-e54abdc1-f9bf-40d4-9ea5-6e969980fa31 for instance with vm_state building and task_state spawning. [ 2147.054600] env[61356]: DEBUG nova.network.neutron [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Successfully updated port: e54abdc1-f9bf-40d4-9ea5-6e969980fa31 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2147.065571] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "refresh_cache-f2627e8b-2c0b-4cba-9920-98ac3686c919" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.066070] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "refresh_cache-f2627e8b-2c0b-4cba-9920-98ac3686c919" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.066610] env[61356]: DEBUG nova.network.neutron [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2147.164269] env[61356]: DEBUG nova.network.neutron [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Instance cache missing network info. 
{{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2147.376061] env[61356]: DEBUG nova.network.neutron [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Updating instance_info_cache with network_info: [{"id": "e54abdc1-f9bf-40d4-9ea5-6e969980fa31", "address": "fa:16:3e:b9:ae:3e", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape54abdc1-f9", "ovs_interfaceid": "e54abdc1-f9bf-40d4-9ea5-6e969980fa31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2147.390252] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "refresh_cache-f2627e8b-2c0b-4cba-9920-98ac3686c919" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2147.390834] env[61356]: DEBUG nova.compute.manager [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Instance network_info: |[{"id": "e54abdc1-f9bf-40d4-9ea5-6e969980fa31", "address": "fa:16:3e:b9:ae:3e", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape54abdc1-f9", "ovs_interfaceid": "e54abdc1-f9bf-40d4-9ea5-6e969980fa31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2147.391557] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:ae:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae4e3171-21cd-4094-b6cf-81bf366c75bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e54abdc1-f9bf-40d4-9ea5-6e969980fa31', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2147.399017] env[61356]: DEBUG oslo.service.loopingcall [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2147.400031] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2147.400031] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69ae3108-f45a-4d07-ab83-e3f2bd79fd60 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.421527] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2147.421527] env[61356]: value = "task-4302688" [ 2147.421527] env[61356]: _type = "Task" [ 2147.421527] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.429498] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302688, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.933194] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302688, 'name': CreateVM_Task, 'duration_secs': 0.31925} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.933388] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2147.934055] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.934217] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.934541] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2147.934786] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c644174-2bd0-4efb-8c92-8a00a4be1b74 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.939109] env[61356]: DEBUG oslo_vmware.api [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 2147.939109] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5273ae33-9973-0e69-aab3-d89dfb030646" [ 2147.939109] env[61356]: _type = "Task" [ 2147.939109] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.945986] env[61356]: DEBUG oslo_vmware.api [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5273ae33-9973-0e69-aab3-d89dfb030646, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.449568] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.449837] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2148.450067] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c77f0db9-bd45-4419-ab0b-c739c5bf86f0 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.095368] env[61356]: DEBUG nova.compute.manager [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Received event network-changed-e54abdc1-f9bf-40d4-9ea5-6e969980fa31 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2149.095643] env[61356]: DEBUG nova.compute.manager [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Refreshing instance network info cache due to event network-changed-e54abdc1-f9bf-40d4-9ea5-6e969980fa31. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2149.095841] env[61356]: DEBUG oslo_concurrency.lockutils [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] Acquiring lock "refresh_cache-f2627e8b-2c0b-4cba-9920-98ac3686c919" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.095898] env[61356]: DEBUG oslo_concurrency.lockutils [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] Acquired lock "refresh_cache-f2627e8b-2c0b-4cba-9920-98ac3686c919" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.096143] env[61356]: DEBUG nova.network.neutron [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Refreshing network info cache for port e54abdc1-f9bf-40d4-9ea5-6e969980fa31 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2149.369373] env[61356]: DEBUG nova.network.neutron [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Updated VIF entry in instance network info cache for port e54abdc1-f9bf-40d4-9ea5-6e969980fa31. 
{{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2149.369924] env[61356]: DEBUG nova.network.neutron [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Updating instance_info_cache with network_info: [{"id": "e54abdc1-f9bf-40d4-9ea5-6e969980fa31", "address": "fa:16:3e:b9:ae:3e", "network": {"id": "dc06d0f9-1b64-489c-911a-fd1686a93f61", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-665827523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe9dfd77dff47cab342d5ebb764e770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape54abdc1-f9", "ovs_interfaceid": "e54abdc1-f9bf-40d4-9ea5-6e969980fa31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.379324] env[61356]: DEBUG oslo_concurrency.lockutils [req-efd42422-a8ad-4593-b05f-ea96abb99b91 req-0737e782-40e9-4ea7-b79e-0382cf95d58f service nova] Releasing lock "refresh_cache-f2627e8b-2c0b-4cba-9920-98ac3686c919" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2163.787195] env[61356]: WARNING oslo_vmware.rw_handles [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2163.787195] env[61356]: ERROR oslo_vmware.rw_handles [ 2163.787739] env[61356]: DEBUG nova.virt.vmwareapi.images [None 
req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2163.789965] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2163.790272] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Copying Virtual Disk [datastore2] vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/b90232b3-7ae6-4b7b-80af-bfadb652c316/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2163.790613] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2230d275-2d01-43c0-823b-9d0ed0ae367b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.799102] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 2163.799102] env[61356]: value = "task-4302689" [ 2163.799102] env[61356]: _type = "Task" [ 2163.799102] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.806655] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': task-4302689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.308981] env[61356]: DEBUG oslo_vmware.exceptions [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2164.309283] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.309856] env[61356]: ERROR nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2164.309856] env[61356]: Faults: ['InvalidArgument'] [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] yield resources [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.driver.spawn(context, instance, image_meta, [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._fetch_image_if_missing(context, vi) [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] image_cache(vi, tmp_image_ds_loc) [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] vm_util.copy_virtual_disk( [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] session._wait_for_task(vmdk_copy_task) [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.wait_for_task(task_ref) [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return evt.wait() [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] result = hub.switch() [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.greenlet.switch() [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.f(*self.args, **self.kw) [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise exceptions.translate_fault(task_info.error) [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Faults: ['InvalidArgument'] [ 2164.309856] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2164.310807] env[61356]: INFO nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Terminating instance [ 2164.312918] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.312918] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2164.312918] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b18e38f-8eb1-4861-9d94-6707458ab172 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.317156] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2164.317156] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2164.317156] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addf7378-6c42-4a3e-8fc2-248cd122c2ae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.323402] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2164.324375] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-491df181-7421-408d-bd15-ca5fae3def9d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.325883] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2164.326024] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2164.326670] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-420e8d97-50bc-42b0-a8e3-ec080e950ec3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.332212] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for the task: (returnval){ [ 2164.332212] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]529a6710-5173-c1cd-97f9-d5234fdc5a8f" [ 2164.332212] env[61356]: _type = "Task" [ 2164.332212] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.342420] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]529a6710-5173-c1cd-97f9-d5234fdc5a8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.394813] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2164.395105] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2164.395339] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Deleting the datastore file [datastore2] d53189a7-249f-48ea-ab30-3ee6242e0dc0 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2164.395645] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-972395e2-25fb-4fa0-9538-1426a2d77dce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.402561] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for the task: (returnval){ [ 2164.402561] env[61356]: value = "task-4302691" [ 2164.402561] env[61356]: _type = "Task" [ 2164.402561] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.411135] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': task-4302691, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2164.844394] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2164.844654] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating directory with path [datastore2] vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2164.844881] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8134007a-3860-4903-879f-494362593718 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.858827] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Created directory with path [datastore2] vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2164.859032] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Fetch image to [datastore2] vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2164.859208] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2164.859915] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4515ef63-545a-458d-87aa-9f4ceb402625 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.866062] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809f2514-bd8f-4054-969d-f70a7af92c03 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.874605] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d929e72c-45c3-47e8-9ec4-980e2975a215 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.906747] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380638be-df85-4f6c-bd84-64288d83775b {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.913341] env[61356]: DEBUG oslo_vmware.api [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Task: {'id': task-4302691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072953} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2164.914755] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2164.914947] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2164.915177] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2164.915361] env[61356]: INFO nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Took 0.60 seconds to destroy the instance on the hypervisor. 
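
[editorial note] The copy-then-poll failure above follows oslo.vmware's standard task pattern: a "*_Task" VIM method returns a Task managed object immediately, wait_for_task() polls its TaskInfo (the "progress is 0%" lines), and a task error is translated into a Python exception such as VimFaultException carrying the fault names (here ['InvalidArgument']). A minimal sketch of that pattern, assuming placeholder vCenter credentials, a placeholder dc_ref datacenter reference, and abbreviated datastore paths (none of these placeholders come from the log):

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    # VC_HOST/VC_USER/VC_PASS and dc_ref are placeholders, not values from this log.
    session = api.VMwareAPISession('VC_HOST', 'VC_USER', 'VC_PASS',
                                   api_retry_count=10,      # retries on transient API faults
                                   task_poll_interval=0.5)  # seconds between TaskInfo polls

    src = '[datastore2] vmware_temp/.../tmp-sparse.vmdk'    # abbreviated paths
    dst = '[datastore2] devstack-image-cache_base/.../image.vmdk'
    dc_ref = None  # stand-in for the Datacenter managed-object reference

    try:
        # Returns a Task moref right away; the copy runs server-side in vCenter.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=src, sourceDatacenter=dc_ref, destName=dst)
        # Polls TaskInfo until success, or raises the translated fault --
        # the VimFaultException path seen in the traceback above.
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        print('copy failed:', e.fault_list, str(e))  # e.g. ['InvalidArgument']

The same polling shows up for DeleteDatastoreFile_Task just above, where the task completes in ~0.07s and wait_for_task returns the TaskInfo instead of raising.
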
[ 2164.917216] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4841d9f2-3bc6-46a0-b60f-638b521f53ec {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.919108] env[61356]: DEBUG nova.compute.claims [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2164.919258] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.919469] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.943448] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2164.996314] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2165.057900] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2165.057900] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2165.125774] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0a195f-6b65-4696-a7cf-64866a77bcc7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.132932] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04afa6b-142a-430a-99de-bd7b1ca269c1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.164868] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd91811-8fdc-484d-b9e8-26aa259d7845 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.172335] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c9a20d-c989-447d-8330-d13853ee05e3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.185165] env[61356]: DEBUG nova.compute.provider_tree [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2165.194561] env[61356]: DEBUG nova.scheduler.client.report [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2165.221237] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.302s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.221773] env[61356]: ERROR nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2165.221773] env[61356]: Faults: ['InvalidArgument'] [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2165.221773] 
env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.driver.spawn(context, instance, image_meta, [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._fetch_image_if_missing(context, vi) [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] image_cache(vi, tmp_image_ds_loc) [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] vm_util.copy_virtual_disk( [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] session._wait_for_task(vmdk_copy_task) [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.wait_for_task(task_ref) [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return evt.wait() [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] result = hub.switch() [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.greenlet.switch() [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.f(*self.args, **self.kw) [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise exceptions.translate_fault(task_info.error) [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Faults: ['InvalidArgument'] [ 2165.221773] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.222635] env[61356]: DEBUG nova.compute.utils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2165.224060] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Build of instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 was re-scheduled: A specified parameter was not correct: fileType [ 2165.224060] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2165.224455] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2165.224627] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2165.224781] env[61356]: DEBUG nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2165.224944] env[61356]: DEBUG nova.network.neutron [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2165.335475] env[61356]: DEBUG neutronclient.v2_0.client [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2165.336394] env[61356]: ERROR nova.compute.manager [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.driver.spawn(context, instance, image_meta, [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._fetch_image_if_missing(context, vi) [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] image_cache(vi, tmp_image_ds_loc) [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] vm_util.copy_virtual_disk( [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] session._wait_for_task(vmdk_copy_task) [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.wait_for_task(task_ref) [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return evt.wait() [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] result = hub.switch() [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.greenlet.switch() [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.f(*self.args, **self.kw) [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise exceptions.translate_fault(task_info.error) [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Faults: ['InvalidArgument'] [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] During handling of the above exception, another exception occurred: [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._build_and_run_instance(context, instance, image, [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File 
"/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise exception.RescheduledException( [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] nova.exception.RescheduledException: Build of instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 was re-scheduled: A specified parameter was not correct: fileType [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Faults: ['InvalidArgument'] [ 2165.336394] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] During handling of the above exception, another exception occurred: [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] exception_handler_v20(status_code, error_body) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise client_exc(message=error_message, [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Neutron server returns request_ids: ['req-b4da4d39-9d03-4841-a3c9-1afc69c33a93'] [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] During handling of the above exception, another exception occurred: [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._deallocate_network(context, instance, requested_networks) [ 2165.338128] env[61356]: ERROR nova.compute.manager 
[instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.network_api.deallocate_for_instance( [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] data = neutron.list_ports(**search_opts) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.list('ports', self.ports_path, retrieve_all, [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] for r in self._pagination(collection, path, **params): [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] res = self.get(path, params=params) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.retry_request("GET", action, body=body, [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2165.338128] env[61356]: ERROR nova.compute.manager [instance: 
d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.do_request(method, action, body=body, [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._handle_fault_response(status_code, replybody, resp) [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise exception.Unauthorized() [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] nova.exception.Unauthorized: Not authorized. [ 2165.339740] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.410958] env[61356]: INFO nova.scheduler.client.report [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Deleted allocations for instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 [ 2165.438486] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7cf6af69-c233-4c67-86e7-714a791be957 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.331s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.438767] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 440.404s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.438965] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] During sync_power_state the instance has a pending task (spawning). Skip. 
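
[editorial note] The 401 cascade above funnels through one decorator in nova/network/neutron.py (the repeated "wrapper" frames at lines 196 and 204 of the traceback): neutronclient raises Unauthorized, and the wrapper re-raises it as nova.exception.Unauthorized for a user-token client. Further down in this log the same wrapper, on a client built from the [neutron] admin credentials in nova.conf, instead raises NeutronAdminCredentialConfigurationInvalid (line 212). A self-contained sketch of that translation, using stand-in exception classes rather than the real nova.exception and neutronclient types:

    import functools

    class Unauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova exception of the same name."""

    class ClientUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    def translate_neutron_errors(func, is_admin=False):
        # Mirrors the behavior visible in the tracebacks: every client call
        # passes through one wrapper that maps 401s to nova exceptions.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except ClientUnauthorized:
                if is_admin:
                    # Admin token rejected: a deployment problem ("please verify
                    # Neutron admin credential located in nova.conf").
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise Unauthorized()
        return wrapper

    def list_ports(**search_opts):
        raise ClientUnauthorized()  # simulate the 401 returned by Neutron

    try:
        translate_neutron_errors(list_ports, is_admin=True)(
            device_id='d53189a7-249f-48ea-ab30-3ee6242e0dc0')
    except NeutronAdminCredentialConfigurationInvalid:
        print('deallocate fails; instance vm_state goes to ERROR')

The admin-credential variant is exactly what terminates the retry loop later in this log, leaving the instance in ERROR after network deallocation fails.
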
[ 2165.439154] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.439665] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.411s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.439902] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Acquiring lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.440103] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.440278] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.442213] env[61356]: INFO nova.compute.manager [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Terminating instance [ 2165.443896] env[61356]: DEBUG nova.compute.manager [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2165.444104] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2165.444364] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1881d453-f2a0-4932-9b7d-2af34f0ca2ce {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.453789] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af6d86e-d2dd-4935-9248-5f0970bed92f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.481296] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d53189a7-249f-48ea-ab30-3ee6242e0dc0 could not be found. [ 2165.481509] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2165.481689] env[61356]: INFO nova.compute.manager [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2165.481937] env[61356]: DEBUG oslo.service.loopingcall [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2165.482180] env[61356]: DEBUG nova.compute.manager [-] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2165.482274] env[61356]: DEBUG nova.network.neutron [-] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2165.574388] env[61356]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61356) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2165.574655] env[61356]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-c7eb7bcb-895e-41a6-a554-61463eac2a7d'] [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2165.575205] env[61356]: ERROR oslo.service.loopingcall [ 2165.576589] env[61356]: ERROR nova.compute.manager [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2165.610891] env[61356]: ERROR nova.compute.manager [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] exception_handler_v20(status_code, error_body) [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise client_exc(message=error_message, [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Neutron server returns request_ids: ['req-c7eb7bcb-895e-41a6-a554-61463eac2a7d'] [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] During handling of the above exception, another exception occurred: [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Traceback (most recent call last): [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: 
d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._delete_instance(context, instance, bdms) [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._shutdown_instance(context, instance, bdms) [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._try_deallocate_network(context, instance, requested_networks) [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] with excutils.save_and_reraise_exception(): [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.force_reraise() [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise self.value [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] _deallocate_network_with_retries() [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return evt.wait() [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] result = hub.switch() [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.greenlet.switch() [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: 
d53189a7-249f-48ea-ab30-3ee6242e0dc0] result = func(*self.args, **self.kw) [ 2165.610891] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] result = f(*args, **kwargs) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._deallocate_network( [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self.network_api.deallocate_for_instance( [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] data = neutron.list_ports(**search_opts) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.list('ports', self.ports_path, retrieve_all, [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] for r in self._pagination(collection, path, **params): [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] res = self.get(path, params=params) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.retry_request("GET", action, body=body, [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] return self.do_request(method, action, body=body, [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] ret = obj(*args, **kwargs) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] self._handle_fault_response(status_code, replybody, resp) [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2165.612092] env[61356]: ERROR nova.compute.manager [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] [ 2165.638622] env[61356]: DEBUG oslo_concurrency.lockutils [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Lock "d53189a7-249f-48ea-ab30-3ee6242e0dc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.690073] env[61356]: INFO nova.compute.manager [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] [instance: d53189a7-249f-48ea-ab30-3ee6242e0dc0] Successfully reverted task state from None on failure for instance. [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server [None req-f485374d-496c-45de-90d8-5df65dc77287 tempest-ListImageFiltersTestJSON-1823852567 tempest-ListImageFiltersTestJSON-1823852567-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-c7eb7bcb-895e-41a6-a554-61463eac2a7d'] [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2165.693537] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server raise self.value [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.694922] env[61356]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2165.694922] env[61356]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2165.696327] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2165.696327] env[61356]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2165.696327] env[61356]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2165.696327] env[61356]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2165.696327] env[61356]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2165.696327] env[61356]: ERROR oslo_messaging.rpc.server [ 2166.898128] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "0f222fb6-d822-4d36-80be-8072497dd691" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.898479] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "0f222fb6-d822-4d36-80be-8072497dd691" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.910080] env[61356]: DEBUG nova.compute.manager [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Starting instance... {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2166.957392] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.957655] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.959113] env[61356]: INFO nova.compute.claims [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2167.099942] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73403c6c-5203-43d2-9834-ded93e46fd94 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.107347] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffb6bb3-7031-46bf-8c3f-986c63449821 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.136816] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd931bf-89ff-4d76-a82d-59c371d6f0e2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.143692] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a638e478-5779-4c89-868c-6ef20e264738 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.156477] env[61356]: DEBUG nova.compute.provider_tree [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2167.165277] env[61356]: DEBUG nova.scheduler.client.report [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2167.181230] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.223s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.181715] env[61356]: DEBUG nova.compute.manager [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Start building networks asynchronously for instance. {{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2167.216794] env[61356]: DEBUG nova.compute.utils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Using /dev/sd instead of None {{(pid=61356) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2167.217969] env[61356]: DEBUG nova.compute.manager [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Allocating IP information in the background. {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2167.218161] env[61356]: DEBUG nova.network.neutron [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] allocate_for_instance() {{(pid=61356) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2167.226705] env[61356]: DEBUG nova.compute.manager [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Start building block device mappings for instance. 
{{(pid=61356) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2167.281957] env[61356]: DEBUG nova.policy [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '877bb82afe8d42ae8f54d1e963c0244b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a8faf03438643ad85e91c93c88ce86d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61356) authorize /opt/stack/nova/nova/policy.py:203}} [ 2167.286862] env[61356]: DEBUG nova.compute.manager [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Start spawning the instance on the hypervisor. {{(pid=61356) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2167.315962] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-06-27T18:40:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-06-27T18:40:20Z,direct_url=,disk_format='vmdk',id=88d3b9c3-6614-40fd-b1c9-50fb806e539a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b8f636a049984f5791c955c43f06725d',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-06-27T18:40:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2167.316220] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2167.316381] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image limits 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2167.316562] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Flavor pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2167.316726] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Image pref 0:0:0 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2167.316878] env[61356]: DEBUG nova.virt.hardware [None 
req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61356) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2167.317098] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2167.317261] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2167.317431] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Got 1 possible topologies {{(pid=61356) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2167.317592] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2167.317766] env[61356]: DEBUG nova.virt.hardware [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61356) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2167.318637] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1c1599-48d2-4ff9-a61b-28784a33f769 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.327404] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d29828-5dd0-4cd6-ab10-9a4941d265f8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.594989] env[61356]: DEBUG nova.network.neutron [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Successfully created port: b4de3b00-92a8-4bbc-818c-d6df8d340878 {{(pid=61356) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2168.643825] env[61356]: DEBUG nova.compute.manager [req-0a960ffe-3fcb-43fe-90cf-afd8b0e45fcc req-9b03f232-df03-44b3-923b-394e90f040ef service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Received event network-vif-plugged-b4de3b00-92a8-4bbc-818c-d6df8d340878 {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2168.643825] env[61356]: DEBUG oslo_concurrency.lockutils [req-0a960ffe-3fcb-43fe-90cf-afd8b0e45fcc req-9b03f232-df03-44b3-923b-394e90f040ef service nova] Acquiring lock 
"0f222fb6-d822-4d36-80be-8072497dd691-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.643825] env[61356]: DEBUG oslo_concurrency.lockutils [req-0a960ffe-3fcb-43fe-90cf-afd8b0e45fcc req-9b03f232-df03-44b3-923b-394e90f040ef service nova] Lock "0f222fb6-d822-4d36-80be-8072497dd691-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.643825] env[61356]: DEBUG oslo_concurrency.lockutils [req-0a960ffe-3fcb-43fe-90cf-afd8b0e45fcc req-9b03f232-df03-44b3-923b-394e90f040ef service nova] Lock "0f222fb6-d822-4d36-80be-8072497dd691-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.643825] env[61356]: DEBUG nova.compute.manager [req-0a960ffe-3fcb-43fe-90cf-afd8b0e45fcc req-9b03f232-df03-44b3-923b-394e90f040ef service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] No waiting events found dispatching network-vif-plugged-b4de3b00-92a8-4bbc-818c-d6df8d340878 {{(pid=61356) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2168.643825] env[61356]: WARNING nova.compute.manager [req-0a960ffe-3fcb-43fe-90cf-afd8b0e45fcc req-9b03f232-df03-44b3-923b-394e90f040ef service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Received unexpected event network-vif-plugged-b4de3b00-92a8-4bbc-818c-d6df8d340878 for instance with vm_state building and task_state spawning. [ 2169.185428] env[61356]: DEBUG nova.network.neutron [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Successfully updated port: b4de3b00-92a8-4bbc-818c-d6df8d340878 {{(pid=61356) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2169.198299] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "refresh_cache-0f222fb6-d822-4d36-80be-8072497dd691" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.198299] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "refresh_cache-0f222fb6-d822-4d36-80be-8072497dd691" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.198299] env[61356]: DEBUG nova.network.neutron [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Building network info cache for instance {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2169.269210] env[61356]: DEBUG nova.compute.manager [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Received event network-changed-b4de3b00-92a8-4bbc-818c-d6df8d340878 
{{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11154}} [ 2169.269210] env[61356]: DEBUG nova.compute.manager [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Refreshing instance network info cache due to event network-changed-b4de3b00-92a8-4bbc-818c-d6df8d340878. {{(pid=61356) external_instance_event /opt/stack/nova/nova/compute/manager.py:11159}} [ 2169.269210] env[61356]: DEBUG oslo_concurrency.lockutils [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] Acquiring lock "refresh_cache-0f222fb6-d822-4d36-80be-8072497dd691" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2169.270184] env[61356]: DEBUG nova.network.neutron [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Instance cache missing network info. {{(pid=61356) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2169.498781] env[61356]: DEBUG nova.network.neutron [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Updating instance_info_cache with network_info: [{"id": "b4de3b00-92a8-4bbc-818c-d6df8d340878", "address": "fa:16:3e:e1:44:ee", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4de3b00-92", "ovs_interfaceid": "b4de3b00-92a8-4bbc-818c-d6df8d340878", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.512551] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "refresh_cache-0f222fb6-d822-4d36-80be-8072497dd691" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2169.512869] env[61356]: DEBUG nova.compute.manager [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Instance network_info: |[{"id": "b4de3b00-92a8-4bbc-818c-d6df8d340878", "address": "fa:16:3e:e1:44:ee", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": 
"br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4de3b00-92", "ovs_interfaceid": "b4de3b00-92a8-4bbc-818c-d6df8d340878", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61356) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2169.513186] env[61356]: DEBUG oslo_concurrency.lockutils [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] Acquired lock "refresh_cache-0f222fb6-d822-4d36-80be-8072497dd691" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2169.513356] env[61356]: DEBUG nova.network.neutron [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Refreshing network info cache for port b4de3b00-92a8-4bbc-818c-d6df8d340878 {{(pid=61356) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2169.514391] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:44:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db328342-7107-4bac-b1d6-111fbd5780f1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4de3b00-92a8-4bbc-818c-d6df8d340878', 'vif_model': 'vmxnet3'}] {{(pid=61356) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2169.522530] env[61356]: DEBUG oslo.service.loopingcall [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2169.523351] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Creating VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2169.526033] env[61356]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66384b7e-67a1-4984-908e-8b46768b29d5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.545200] env[61356]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2169.545200] env[61356]: value = "task-4302692" [ 2169.545200] env[61356]: _type = "Task" [ 2169.545200] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.552847] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302692, 'name': CreateVM_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2169.789446] env[61356]: DEBUG nova.network.neutron [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Updated VIF entry in instance network info cache for port b4de3b00-92a8-4bbc-818c-d6df8d340878. {{(pid=61356) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2169.789902] env[61356]: DEBUG nova.network.neutron [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Updating instance_info_cache with network_info: [{"id": "b4de3b00-92a8-4bbc-818c-d6df8d340878", "address": "fa:16:3e:e1:44:ee", "network": {"id": "72a3c817-91ec-40cf-b041-355e729a14d0", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2024050929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a8faf03438643ad85e91c93c88ce86d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db328342-7107-4bac-b1d6-111fbd5780f1", "external-id": "nsx-vlan-transportzone-827", "segmentation_id": 827, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4de3b00-92", "ovs_interfaceid": "b4de3b00-92a8-4bbc-818c-d6df8d340878", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.800070] env[61356]: DEBUG oslo_concurrency.lockutils [req-ef712b08-4ba8-4d83-8af7-91b300d45eec req-addecec2-e05e-41d2-943e-7899ada5f8fe service nova] Releasing lock "refresh_cache-0f222fb6-d822-4d36-80be-8072497dd691" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.055311] env[61356]: DEBUG oslo_vmware.api [-] Task: {'id': task-4302692, 'name': CreateVM_Task, 'duration_secs': 0.289838} completed successfully. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.055458] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Created VM on the ESX host {{(pid=61356) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2170.056082] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.056255] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.056569] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2170.056844] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7acc68b0-f79c-46fd-8d9b-c66c3d250732 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.061444] env[61356]: DEBUG oslo_vmware.api [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 2170.061444] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527a7f7c-04a6-03a3-8f01-c00c0f59db94" [ 2170.061444] env[61356]: _type = "Task" [ 2170.061444] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.070112] env[61356]: DEBUG oslo_vmware.api [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]527a7f7c-04a6-03a3-8f01-c00c0f59db94, 'name': SearchDatastore_Task} progress is 0%. 
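[editor's note] Before touching the image cache, the request serializes on a lock named after the datastore path ("[datastore2] devstack-image-cache_base/88d3b9c3-…") and then issues HostDatastoreBrowser.SearchDatastore_Task to see whether the cached VMDK already exists. A sketch of that guard using oslo.concurrency's named-lock context manager (requires oslo.concurrency); `image_is_cached` is a hypothetical stand-in for the datastore search:

```python
from oslo_concurrency import lockutils

IMAGE_ID = "88d3b9c3-6614-40fd-b1c9-50fb806e539a"  # image id seen in these records

def image_is_cached(datastore, image_id):
    """Hypothetical probe standing in for SearchDatastore_Task."""
    return False

# Concurrent spawns of the same image serialize here instead of racing on the
# shared devstack-image-cache_base directory.
with lockutils.lock("[datastore2] devstack-image-cache_base/%s" % IMAGE_ID):
    if not image_is_cached("datastore2", IMAGE_ID):
        pass  # fall through to the fetch/copy path seen later in the log
```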
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.572235] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.572488] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Processing image 88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2170.572746] env[61356]: DEBUG oslo_concurrency.lockutils [None req-953b7fd1-44d7-43c6-b69e-5c5e8e31752a tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2178.998573] env[61356]: DEBUG oslo_concurrency.lockutils [None req-cb89a8cd-3528-4a5d-be1f-72808b327409 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "887b020c-3652-4724-a0d2-d7413325c5c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.758624] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.758052] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.770165] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.770165] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.770255] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.770375] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally 
available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2197.771607] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4157a1-370f-49c4-aafb-c69c2814218b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.783016] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5b1e20-37b7-424d-b3f0-8e66ad18024b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.793917] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab3b624-0c04-4650-95cc-ec76dbd1aacf {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.800140] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892f568b-284e-427e-8576-03ca7bad7e31 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.829930] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180648MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2197.830101] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.830340] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.901891] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902073] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902209] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902333] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902454] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 887b020c-3652-4724-a0d2-d7413325c5c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902574] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902699] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f2627e8b-2c0b-4cba-9920-98ac3686c919 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.902819] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 0f222fb6-d822-4d36-80be-8072497dd691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
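[editor's note] Each of the eight instances above holds an identical placement allocation, and those claims are exactly what the final-resource-view record that follows reports. Reproducing the arithmetic (assuming, as the inventory data suggests, that the 512 MB host reservation is folded into used_ram):

```python
# Eight tracked instances, each claiming 1 GB disk, 128 MB RAM and 1 vCPU.
instances = 8
per_instance = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}
reserved_mb = 512  # reserved memory from the MEMORY_MB inventory record

used_ram = instances * per_instance["MEMORY_MB"] + reserved_mb  # 1536 MB
used_disk = instances * per_instance["DISK_GB"]                 # 8 GB
used_vcpus = instances * per_instance["VCPU"]                   # 8

assert (used_ram, used_disk, used_vcpus) == (1536, 8, 8)  # matches the log
```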
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2197.906204] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2197.906400] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=110GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2198.000728] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5282bd-78d2-455a-b6d0-83f6234ebee0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.008345] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b766a8-2a68-4b93-9676-31d1af7ad178 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.036741] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c0e35b-ddb8-4046-976d-32a38c97fa24 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.043512] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19dd9a8-c3d0-4b56-8b57-8deedfe00139 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.056151] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2198.065759] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2198.083727] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2198.083912] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.254s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.079695] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.759400] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.759798] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.758549] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.759328] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.759687] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2205.758658] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.759038] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2205.759038] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778019] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778490] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2205.778752] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. 
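[editor's note] The cache healer first rebuilds its candidate list and passes over every instance still in the Building state, since those have no settled network info to refresh; only afterwards would it pick an instance to heal. A compressed sketch of that filter (attribute names are illustrative, not Nova's exact fields):

```python
BUILDING = "building"

def instances_to_heal(instances):
    """Yield instances whose network info cache is worth refreshing."""
    for inst in instances:
        if inst["vm_state"] == BUILDING:
            # mirrors "Skipping network cache update ... because it is Building."
            continue
        yield inst

# With every instance above still building the generator is empty, hence
# "Didn't find any instances for network info cache update."
assert list(instances_to_heal([{"uuid": "0f222fb6", "vm_state": BUILDING}])) == []
```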
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2205.779386] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.774504] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.207410] env[61356]: WARNING oslo_vmware.rw_handles [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2212.207410] env[61356]: ERROR oslo_vmware.rw_handles [ 2212.208080] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2212.209812] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2212.210065] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Copying Virtual Disk [datastore2] vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/b48320a2-eca6-4200-afed-0166047d6c81/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2212.210350] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf35c0dd-a664-410d-984c-ad5a3d4944c7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.220042] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for the task: (returnval){ [ 2212.220042] env[61356]: value = "task-4302693" [ 2212.220042] env[61356]: _type = "Task" [ 2212.220042] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.227759] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': task-4302693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.730685] env[61356]: DEBUG oslo_vmware.exceptions [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2212.730960] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2212.731590] env[61356]: ERROR nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2212.731590] env[61356]: Faults: ['InvalidArgument'] [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Traceback (most recent call last): [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] yield resources [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self.driver.spawn(context, instance, image_meta, [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self._fetch_image_if_missing(context, vi) [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] image_cache(vi, tmp_image_ds_loc) [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] vm_util.copy_virtual_disk( [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] session._wait_for_task(vmdk_copy_task) [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] return self.wait_for_task(task_ref) [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] return evt.wait() [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] result = hub.switch() [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] return self.greenlet.switch() [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self.f(*self.args, **self.kw) [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] raise exceptions.translate_fault(task_info.error) [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2212.731590] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Faults: ['InvalidArgument'] [ 2212.731590] 
env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] [ 2212.732460] env[61356]: INFO nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Terminating instance [ 2212.733533] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2212.733748] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2212.733984] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d846b3d-7be7-4959-a9f4-fb68f8341ae4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.736482] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2212.736682] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2212.737404] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a194a1a-6891-440d-b680-82927157bba9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.743931] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2212.744157] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae26633b-5070-4a5b-850e-530604300bc3 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.746215] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2212.746391] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2212.747434] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8366a480-40b7-410b-b822-27dd38418f7e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.752375] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 2212.752375] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5214c193-b023-caef-ba50-f39aeda46b46" [ 2212.752375] env[61356]: _type = "Task" [ 2212.752375] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.759882] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5214c193-b023-caef-ba50-f39aeda46b46, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.810428] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2212.810670] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2212.810855] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Deleting the datastore file [datastore2] d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2212.811129] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-984d117d-c002-4af1-ab60-92e1682e7767 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.817458] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for the task: (returnval){ [ 2212.817458] env[61356]: value = "task-4302695" [ 2212.817458] env[61356]: _type = "Task" [ 2212.817458] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.824923] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': task-4302695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.263359] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2213.263664] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating directory with path [datastore2] vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2213.263799] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9d6bf24-8242-4033-8443-d4ca35914dff {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.274281] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Created directory with path [datastore2] vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2213.274468] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Fetch image to [datastore2] vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2213.274635] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2213.275339] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae872d76-272f-43d6-8d80-dc6a0c97b25f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.281586] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a212b1-88f6-4b7d-8852-38f1d12c159b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.290207] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4531e0c-ced1-4eb8-847c-f0ca492285fb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.324218] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-424c8da1-1286-40a1-9c8f-9ba8e6a985bc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.330824] env[61356]: DEBUG oslo_vmware.api [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': task-4302695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077285} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.332195] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2213.332388] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2213.332557] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2213.332724] env[61356]: INFO nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Took 0.60 seconds to destroy the instance on the hypervisor. 
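[editor's note] The destroy path just completed runs as two vCenter tasks in a fixed order: unregister the VM, then delete its datastore directory, with the driver timing the whole sequence. A minimal sketch of that ordering; both callables are hypothetical stand-ins for the VirtualMachine.UnregisterVM and FileManager.DeleteDatastoreFile_Task calls:

```python
import time

def destroy_instance(unregister_vm, delete_datastore_dir, instance_uuid):
    """Unregister first, then remove on-disk state, as the records above do."""
    start = time.monotonic()
    unregister_vm(instance_uuid)                            # UnregisterVM
    delete_datastore_dir("[datastore2] " + instance_uuid)   # DeleteDatastoreFile_Task
    print("Took %.2f seconds to destroy the instance on the hypervisor."
          % (time.monotonic() - start))
```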
[ 2213.334478] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fa2b6615-6e90-4803-89f9-bbfce8394d8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.336279] env[61356]: DEBUG nova.compute.claims [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2213.336448] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2213.336659] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2213.357968] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2213.410979] env[61356]: DEBUG oslo_vmware.rw_handles [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2213.468484] env[61356]: DEBUG oslo_vmware.rw_handles [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2213.468675] env[61356]: DEBUG oslo_vmware.rw_handles [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
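[editor's note] The rw_handles records describe streaming 21,318,656 bytes of image data into the datastore folder URL over a raw HTTPS connection, then closing the write handle; the response is only read at close time, which is also where the RemoteDisconnected earlier in this log surfaced. A simplified sketch with http.client; authentication tickets and error handling are omitted:

```python
import http.client

def upload_image(host, path, image_iter, size):
    """Stream an image iterator into a datastore file over HTTPS."""
    conn = http.client.HTTPSConnection(host, 443)
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(size))  # size = 21318656 in the log
    conn.endheaders()
    for chunk in image_iter:   # "Completed reading data from the image iterator."
        conn.send(chunk)
    response = conn.getresponse()  # deferred until close; can raise RemoteDisconnected
    conn.close()
    return response.status
```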
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2213.527879] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5b17a4-cf0c-499b-aba3-9378a2ccdd6f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.535289] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b204d5ff-b2e8-4717-a562-d5372be41e11 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.564257] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd85b50-b208-46eb-b2d1-3e18ee085261 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.570669] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c50fac-1dd0-41f3-a74b-1bc87b5e230c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.583846] env[61356]: DEBUG nova.compute.provider_tree [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2213.592106] env[61356]: DEBUG nova.scheduler.client.report [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2213.605146] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.268s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2213.605656] env[61356]: ERROR nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2213.605656] env[61356]: Faults: ['InvalidArgument'] [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Traceback (most recent call last): [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: 
d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self.driver.spawn(context, instance, image_meta, [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self._fetch_image_if_missing(context, vi) [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] image_cache(vi, tmp_image_ds_loc) [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] vm_util.copy_virtual_disk( [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] session._wait_for_task(vmdk_copy_task) [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] return self.wait_for_task(task_ref) [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] return evt.wait() [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] result = hub.switch() [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] return self.greenlet.switch() [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] self.f(*self.args, **self.kw) [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] raise exceptions.translate_fault(task_info.error) [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Faults: ['InvalidArgument'] [ 2213.605656] env[61356]: ERROR nova.compute.manager [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] [ 2213.606520] env[61356]: DEBUG nova.compute.utils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2213.607732] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Build of instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e was re-scheduled: A specified parameter was not correct: fileType [ 2213.607732] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2213.608122] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2213.608296] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2213.608467] env[61356]: DEBUG nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2213.608628] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2213.908031] env[61356]: DEBUG nova.network.neutron [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2213.919951] env[61356]: INFO nova.compute.manager [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Took 0.31 seconds to deallocate network for instance. [ 2214.013843] env[61356]: INFO nova.scheduler.client.report [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Deleted allocations for instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e [ 2214.039228] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9f60d92b-eeed-4069-8d1a-cc3b5cf3c2c1 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.350s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.039531] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.746s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.039734] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquiring lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.039942] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2214.040122] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.042290] env[61356]: INFO nova.compute.manager [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Terminating instance [ 2214.044162] env[61356]: DEBUG nova.compute.manager [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2214.044281] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2214.044718] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a85a37e-4030-440d-bc22-c452233181aa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.054286] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6771ab12-0564-42b2-961e-5393b6c35d5d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.081409] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e could not be found. [ 2214.081614] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2214.081793] env[61356]: INFO nova.compute.manager [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2214.082040] env[61356]: DEBUG oslo.service.loopingcall [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2214.082259] env[61356]: DEBUG nova.compute.manager [-] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2214.082345] env[61356]: DEBUG nova.network.neutron [-] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2214.117259] env[61356]: DEBUG nova.network.neutron [-] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2214.126496] env[61356]: INFO nova.compute.manager [-] [instance: d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e] Took 0.04 seconds to deallocate network for instance. [ 2214.255625] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7682e35f-811b-4215-acc5-4945bb130fbe tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Lock "d2e12e5c-ebd6-4ddf-a50c-e8f19fee1b6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.758116] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.753699] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2259.758876] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2259.771831] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.772050] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.772238] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2259.772386] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2259.773577] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55822053-f034-46a3-9619-72e43324c53b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.782786] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7093ce6d-9b38-4819-a593-c26c5cb27c8f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.796160] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e397f93c-6840-47ff-926e-f1db63aa7e1b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.802105] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4309f6b3-aea5-4a25-a9cf-94f250571654 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.830316] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180668MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2259.830450] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.830634] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.892607] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 13efd018-f491-4979-87b8-0b4af68aaf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.892767] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.892895] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.893027] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 887b020c-3652-4724-a0d2-d7413325c5c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.893156] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.893273] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f2627e8b-2c0b-4cba-9920-98ac3686c919 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.893391] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 0f222fb6-d822-4d36-80be-8072497dd691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2259.893570] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2259.893704] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=110GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2259.982039] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f26cc7-1403-4fdf-b709-dae889c4ffd1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.989146] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58bed3f-9409-49e3-a792-eb5bd340956d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.019023] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d13a69-8053-4c08-ad9a-a4670d257c0e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.026030] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a15f42-24e9-445a-adfd-cbc424d7326e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.038431] env[61356]: DEBUG nova.compute.provider_tree 
[None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2260.047288] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2260.067258] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2260.067258] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.236s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.223028] env[61356]: WARNING oslo_vmware.rw_handles [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2262.223028] env[61356]: ERROR oslo_vmware.rw_handles [ 2262.223740] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2262.225302] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2262.225543] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Copying Virtual Disk [datastore2] vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/748e545c-92f3-411a-b15e-af5aa0c1156c/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2262.225819] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-610c0db5-7dad-42a6-8df4-64e94c1a9ad9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.236894] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 2262.236894] env[61356]: value = "task-4302696" [ 2262.236894] env[61356]: _type = "Task" [ 2262.236894] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.244465] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': task-4302696, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.747204] env[61356]: DEBUG oslo_vmware.exceptions [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Fault InvalidArgument not matched. 
{{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2262.747457] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2262.748038] env[61356]: ERROR nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2262.748038] env[61356]: Faults: ['InvalidArgument'] [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Traceback (most recent call last): [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] yield resources [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self.driver.spawn(context, instance, image_meta, [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self._fetch_image_if_missing(context, vi) [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] image_cache(vi, tmp_image_ds_loc) [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] vm_util.copy_virtual_disk( [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] session._wait_for_task(vmdk_copy_task) [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] return self.wait_for_task(task_ref) [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] return evt.wait() [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] result = hub.switch() [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] return self.greenlet.switch() [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self.f(*self.args, **self.kw) [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] raise exceptions.translate_fault(task_info.error) [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Faults: ['InvalidArgument'] [ 2262.748038] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] [ 2262.748839] env[61356]: INFO nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Terminating instance [ 2262.749973] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2262.750216] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2262.750456] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d393d8fa-554c-45bf-9835-7692defb63ab 
{{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.752571] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2262.752765] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2262.753480] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e447c054-5b90-48b5-b5da-03df3d658f5f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.760303] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2262.761222] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6a8d712-8c6e-4868-b25f-170429a0146c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.762628] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2262.762807] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2262.763480] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c1d387c-f7e4-4adb-a33a-ff40f1ccc87f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.768059] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 2262.768059] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5203d603-60e3-0fea-5add-fc5dbf3b80ed" [ 2262.768059] env[61356]: _type = "Task" [ 2262.768059] env[61356]: } to complete. 
{{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.774910] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]5203d603-60e3-0fea-5add-fc5dbf3b80ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.829924] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2262.830186] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2262.830369] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Deleting the datastore file [datastore2] 13efd018-f491-4979-87b8-0b4af68aaf1b {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2262.830628] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41250299-7f2a-4ab7-801d-ce5084e26daa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.836440] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for the task: (returnval){ [ 2262.836440] env[61356]: value = "task-4302698" [ 2262.836440] env[61356]: _type = "Task" [ 2262.836440] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.843589] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': task-4302698, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.066475] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.066705] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.278447] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2263.278790] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating directory with path [datastore2] vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2263.278909] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7771450-8158-43c2-b627-d7df39b66ed9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.292296] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Created directory with path [datastore2] vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2263.292499] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Fetch image to [datastore2] vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2263.292649] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2263.293385] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefd3c9a-9df8-4a16-8c47-ff9e394109f5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.299617] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dc4371b2-6fd4-4d47-b53c-12fb638f1156 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.308188] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ed6f8a-0f0a-4e07-afc3-a8943e96eaa5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.339984] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4ba4b5-80eb-4598-817a-ad7670a27241 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.346279] env[61356]: DEBUG oslo_vmware.api [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Task: {'id': task-4302698, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084801} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.347659] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2263.347842] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2263.348069] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2263.348264] env[61356]: INFO nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Took 0.60 seconds to destroy the instance on the hypervisor. 
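The DeleteDatastoreFile_Task and SearchDatastore_Task records above follow oslo.vmware's standard task-polling pattern: invoke_api() starts an asynchronous vSphere task, and wait_for_task() polls its TaskInfo (the "progress is 0%" records) until it succeeds or raises the fault translated from the task error. A minimal sketch of that pattern, assuming placeholder vCenter credentials and a placeholder datastore path, none of which are taken from this deployment:

    from oslo_vmware import api, exceptions

    # Placeholder endpoint and credentials; the retry/poll knobs mirror the
    # kind of session setup the driver logs at startup.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    content = session.vim.service_content
    # Start an asynchronous datastore file delete (cf. task-4302698 above).
    # The datacenter moref is omitted here for brevity; a real vCenter call
    # passes one, as Nova's ds_util.file_delete does.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', content.fileManager,
        name='[datastore2] example/path', datacenter=None)
    try:
        # Polls the task until 'success'; on 'error' the task fault is
        # translated and raised, which is what produces the
        # VimFaultException records throughout this log.
        session.wait_for_task(task)
    except exceptions.VimFaultException as e:
        print('VIM fault(s):', e.fault_list)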
[ 2263.350044] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b737a201-30ea-45c9-bcdc-abdc7ac3673e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.351842] env[61356]: DEBUG nova.compute.claims [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2263.352028] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2263.352244] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2263.374288] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2263.430434] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2263.488033] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2263.488233] env[61356]: DEBUG oslo_vmware.rw_handles [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2263.537565] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db98f82-0b06-48c0-b109-cb3237ce2f9b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.544925] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e48d2ed-0fa4-40e8-9f21-f464432b2282 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.575697] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9ffa2b-eb73-46c9-851e-b32262262ae2 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.582364] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a9f2cd-22ab-45bb-8da8-07db4a3195d5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.594770] env[61356]: DEBUG nova.compute.provider_tree [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2263.604949] env[61356]: DEBUG nova.scheduler.client.report [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2263.621405] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.269s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2263.621951] env[61356]: ERROR nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2263.621951] env[61356]: Faults: ['InvalidArgument'] [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Traceback (most recent call last): [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2263.621951] 
env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self.driver.spawn(context, instance, image_meta, [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self._fetch_image_if_missing(context, vi) [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] image_cache(vi, tmp_image_ds_loc) [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] vm_util.copy_virtual_disk( [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] session._wait_for_task(vmdk_copy_task) [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] return self.wait_for_task(task_ref) [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] return evt.wait() [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] result = hub.switch() [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] return self.greenlet.switch() [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] self.f(*self.args, **self.kw) [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] raise exceptions.translate_fault(task_info.error) [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Faults: ['InvalidArgument'] [ 2263.621951] env[61356]: ERROR nova.compute.manager [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] [ 2263.622919] env[61356]: DEBUG nova.compute.utils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2263.625097] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Build of instance 13efd018-f491-4979-87b8-0b4af68aaf1b was re-scheduled: A specified parameter was not correct: fileType [ 2263.625097] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2263.625097] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2263.625097] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2263.625097] env[61356]: DEBUG nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2263.625097] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2263.758419] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.997015] env[61356]: DEBUG nova.network.neutron [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2264.007699] env[61356]: INFO nova.compute.manager [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Took 0.38 seconds to deallocate network for instance. 
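Every build failure in this stretch is the same CopyVirtualDisk_Task fault: the TaskInfo error carries fileType/InvalidArgument, get_fault_class() finds no registered exception subclass for it (the "Fault InvalidArgument not matched" record above), so translate_fault() raises the generic VimFaultException that _do_build_and_run_instance then turns into a re-schedule. A hedged sketch of how a caller can branch on that fault list; RetryableCacheError and the retry decision are illustrative stand-ins, not Nova's actual code:

    from oslo_vmware import exceptions

    class RetryableCacheError(Exception):
        """Illustrative marker for 'hand the build back to the scheduler'."""

    def wait_for_disk_copy(session, copy_task):
        try:
            session.wait_for_task(copy_task)
        except exceptions.VimFaultException as e:
            # e.fault_list is what the log renders as
            # "Faults: ['InvalidArgument']".
            if 'InvalidArgument' in e.fault_list:
                raise RetryableCacheError(str(e)) from e
            raise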
[ 2264.102811] env[61356]: INFO nova.scheduler.client.report [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Deleted allocations for instance 13efd018-f491-4979-87b8-0b4af68aaf1b [ 2264.147909] env[61356]: DEBUG oslo_concurrency.lockutils [None req-28f69f8d-d84a-47c9-84bc-a66e8d84c09d tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 511.401s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.148234] env[61356]: DEBUG oslo_concurrency.lockutils [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 315.187s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.148512] env[61356]: DEBUG oslo_concurrency.lockutils [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "13efd018-f491-4979-87b8-0b4af68aaf1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.148689] env[61356]: DEBUG oslo_concurrency.lockutils [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.148861] env[61356]: DEBUG oslo_concurrency.lockutils [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.151181] env[61356]: INFO nova.compute.manager [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Terminating instance [ 2264.153123] env[61356]: DEBUG nova.compute.manager [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2264.153352] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2264.153958] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64ff3d9c-63b5-4f5c-b060-969552dcf499 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.163356] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e71d28c-07b1-4b49-be3b-7e929112ae48 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.190848] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 13efd018-f491-4979-87b8-0b4af68aaf1b could not be found. [ 2264.191102] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2264.191299] env[61356]: INFO nova.compute.manager [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2264.191553] env[61356]: DEBUG oslo.service.loopingcall [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2264.191778] env[61356]: DEBUG nova.compute.manager [-] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2264.191872] env[61356]: DEBUG nova.network.neutron [-] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2264.228102] env[61356]: DEBUG nova.network.neutron [-] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2264.240530] env[61356]: INFO nova.compute.manager [-] [instance: 13efd018-f491-4979-87b8-0b4af68aaf1b] Took 0.05 seconds to deallocate network for instance. 
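The terminate path above is deliberately idempotent: SearchIndex.FindAllByUuid returns nothing for the already-failed build, the resulting InstanceNotFound is downgraded to a WARNING, and cleanup proceeds to network deallocation anyway. A minimal sketch of that lookup-then-unregister shape, assuming an established oslo.vmware session; error handling and datastore cleanup are trimmed for brevity:

    def find_vm_ref(session, instance_uuid):
        """Locate a VM by Nova instance UUID; None if it no longer exists."""
        search_index = session.vim.service_content.searchIndex
        vms = session.invoke_api(
            session.vim, 'FindAllByUuid', search_index,
            uuid=instance_uuid, vmSearch=True, instanceUuid=True)
        return vms[0] if vms else None

    def destroy_if_present(session, instance_uuid):
        vm_ref = find_vm_ref(session, instance_uuid)
        if vm_ref is None:
            # Backend VM already gone: warn and fall through to network
            # cleanup, matching the WARNING + "Instance destroyed" records.
            return
        # Unregister only; datastore contents are deleted separately, as the
        # DeleteDatastoreFile_Task records earlier in this section show.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)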
[ 2264.337217] env[61356]: DEBUG oslo_concurrency.lockutils [None req-420ad66c-7566-4564-931e-1c1fc6cab2d9 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Lock "13efd018-f491-4979-87b8-0b4af68aaf1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.758317] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.758486] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2265.759257] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.759616] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2265.759616] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2265.776214] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2265.776395] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2265.776504] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2265.776630] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2265.776755] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2265.776874] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2265.777034] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2265.777562] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2311.203620] env[61356]: WARNING oslo_vmware.rw_handles [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2311.203620] env[61356]: ERROR oslo_vmware.rw_handles [ 2311.204543] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2311.205953] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2311.206236] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 
tempest-DeleteServersTestJSON-1040658938-project-member] Copying Virtual Disk [datastore2] vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/8a3680fe-1693-4aa9-b361-36a315a913e4/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2311.206518] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-346d6fac-4e82-4fa1-bc64-b1fa323ad81e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.214365] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 2311.214365] env[61356]: value = "task-4302699" [ 2311.214365] env[61356]: _type = "Task" [ 2311.214365] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.221976] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.725137] env[61356]: DEBUG oslo_vmware.exceptions [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2311.725387] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2311.725942] env[61356]: ERROR nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.725942] env[61356]: Faults: ['InvalidArgument'] [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Traceback (most recent call last): [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] yield resources [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self.driver.spawn(context, instance, image_meta, [ 2311.725942] env[61356]: ERROR 
nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self._fetch_image_if_missing(context, vi) [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] image_cache(vi, tmp_image_ds_loc) [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] vm_util.copy_virtual_disk( [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] session._wait_for_task(vmdk_copy_task) [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] return self.wait_for_task(task_ref) [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] return evt.wait() [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] result = hub.switch() [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] return self.greenlet.switch() [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self.f(*self.args, **self.kw) [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 
9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] raise exceptions.translate_fault(task_info.error) [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Faults: ['InvalidArgument'] [ 2311.725942] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] [ 2311.727078] env[61356]: INFO nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Terminating instance [ 2311.727809] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2311.728028] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.728264] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3501167-7c22-472f-bf67-8f78615ae2fc {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.730356] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2311.730552] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2311.731273] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ac0e82-3ade-4fad-82d8-e557c7133267 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.737965] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2311.738184] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8225e41-4ce6-47d9-acfb-e7ad18019295 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.740115] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.740293] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2311.741222] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7d0f0c6-8063-4e93-b3af-af9f7ab1b5e9 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.745480] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Waiting for the task: (returnval){ [ 2311.745480] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a91bba-282b-106c-20b2-943e268eb77f" [ 2311.745480] env[61356]: _type = "Task" [ 2311.745480] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.755974] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a91bba-282b-106c-20b2-943e268eb77f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.799925] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2311.800142] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2311.800317] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleting the datastore file [datastore2] 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2311.800582] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c1a02ef-8966-4838-b7c7-25808dfdc36f {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.807091] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for the task: (returnval){ [ 2311.807091] env[61356]: value = "task-4302701" [ 2311.807091] env[61356]: _type = "Task" [ 2311.807091] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.814341] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302701, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.256028] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2312.256406] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Creating directory with path [datastore2] vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2312.256544] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d7c5f9e-804f-41f2-a971-9e9d966929a7 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.267356] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Created directory with path [datastore2] vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2312.267541] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Fetch image to [datastore2] vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2312.267733] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2312.268432] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957aca3b-6bf6-4969-b674-bd903453d02c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.275689] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba5e1f0-95a0-43aa-a203-e969f24b6ab4 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.284273] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21885503-9c7f-4c02-9534-563e1b02907e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.317061] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c47e45-4692-48f0-af87-7e2c30cc2a9f {{(pid=61356) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.324065] env[61356]: DEBUG oslo_vmware.api [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Task: {'id': task-4302701, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074865} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.325482] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2312.325693] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2312.325897] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2312.326087] env[61356]: INFO nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Took 0.60 seconds to destroy the instance on the hypervisor. 
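Every vCenter operation in this stretch (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task, SearchDatastore_Task) funnels through the same wait loop in oslo.vmware's api.py: per the line numbers the log cites, _poll_task logs "progress is N%" at :434, logs completion with a duration_secs at :444, and raises a translated fault at :448, which is the path that produced the InvalidArgument/fileType failure. As an illustration only (the task-info fields below are assumptions modelled on the log, not oslo.vmware's real objects), the loop behaves roughly like:

import time

class TaskFault(Exception):
    """Stand-in for a translated VIM fault such as InvalidArgument (illustration only)."""

def wait_for_task(poll_task_info, interval=0.5):
    """Block until a vCenter-style task leaves its running states.

    poll_task_info() is assumed to return an object with .state
    ('queued'/'running'/'success'/'error'), .progress and .error, mirroring
    the task_info that the log's _poll_task inspects.
    """
    while True:
        info = poll_task_info()
        if info.state == 'success':
            return info                  # the "completed successfully" case
        if info.state == 'error':
            raise TaskFault(info.error)  # surfaces as the VimFaultException above
        time.sleep(interval)             # between "progress is N%" polls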
[ 2312.328229] env[61356]: DEBUG nova.compute.claims [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2312.328401] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.328623] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.332532] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cd827596-721b-4f68-9651-00ec7b51f715 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.352494] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2312.409252] env[61356]: DEBUG oslo_vmware.rw_handles [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2312.471822] env[61356]: DEBUG oslo_vmware.rw_handles [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2312.472010] env[61356]: DEBUG oslo_vmware.rw_handles [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2312.517072] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb24e2f-49c4-421a-b234-b99db11d3210 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.525025] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade79297-c790-42cb-b177-2b1d093d0eae {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.555395] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17b8222-ab90-4920-9b77-330b22973eda {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.562108] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a9ca3e-3303-47ba-9590-a81daad0659b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.574771] env[61356]: DEBUG nova.compute.provider_tree [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2312.583659] env[61356]: DEBUG nova.scheduler.client.report [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2312.598829] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.270s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.599399] env[61356]: ERROR nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2312.599399] env[61356]: Faults: ['InvalidArgument'] [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Traceback (most recent call last): [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2312.599399] env[61356]: ERROR 
nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self.driver.spawn(context, instance, image_meta, [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self._fetch_image_if_missing(context, vi) [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] image_cache(vi, tmp_image_ds_loc) [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] vm_util.copy_virtual_disk( [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] session._wait_for_task(vmdk_copy_task) [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] return self.wait_for_task(task_ref) [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] return evt.wait() [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] result = hub.switch() [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] return self.greenlet.switch() [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] self.f(*self.args, **self.kw) [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] raise exceptions.translate_fault(task_info.error) [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Faults: ['InvalidArgument'] [ 2312.599399] env[61356]: ERROR nova.compute.manager [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] [ 2312.600425] env[61356]: DEBUG nova.compute.utils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2312.601575] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Build of instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 was re-scheduled: A specified parameter was not correct: fileType [ 2312.601575] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2312.601968] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2312.602160] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
[ 2312.602332] env[61356]: DEBUG nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2312.602496] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2312.887444] env[61356]: DEBUG nova.network.neutron [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.899074] env[61356]: INFO nova.compute.manager [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Took 0.30 seconds to deallocate network for instance. [ 2312.990888] env[61356]: INFO nova.scheduler.client.report [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Deleted allocations for instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 [ 2313.012245] env[61356]: DEBUG oslo_concurrency.lockutils [None req-9ed3e57a-5fef-49b5-b41f-e281ab33cba1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 539.517s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.012522] env[61356]: DEBUG oslo_concurrency.lockutils [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 343.289s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.012758] env[61356]: DEBUG oslo_concurrency.lockutils [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2313.012959] env[61356]: DEBUG oslo_concurrency.lockutils [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s
{{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.013390] env[61356]: DEBUG oslo_concurrency.lockutils [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.015177] env[61356]: INFO nova.compute.manager [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Terminating instance [ 2313.016939] env[61356]: DEBUG nova.compute.manager [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2313.017173] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2313.017656] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-524e9dd2-b1fd-4cf2-bb77-54864034c95d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.026488] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a42548b-1275-44fd-89d1-a531d84d8140 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.051995] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920 could not be found. [ 2313.052222] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2313.052401] env[61356]: INFO nova.compute.manager [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2313.052677] env[61356]: DEBUG oslo.service.loopingcall [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2313.052932] env[61356]: DEBUG nova.compute.manager [-] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2313.053042] env[61356]: DEBUG nova.network.neutron [-] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2313.083126] env[61356]: DEBUG nova.network.neutron [-] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2313.090930] env[61356]: INFO nova.compute.manager [-] [instance: 9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920] Took 0.04 seconds to deallocate network for instance. [ 2313.175380] env[61356]: DEBUG oslo_concurrency.lockutils [None req-68ecd337-d63e-4c36-a2dd-9480b7e675a1 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Lock "9be2f9f6-fec4-4b8d-8e85-9e4bbe1f7920" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.163s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2316.758784] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.766589] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.009704] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.010217] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Getting list of instances from cluster (obj){ [ 2319.010217] env[61356]: value = "domain-c8" [ 2319.010217] env[61356]: _type = "ClusterComputeResource" [ 2319.010217] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2319.011330] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06a4d4e-35b9-49ff-bb57-4c8fd6c6ba32 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.024091] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Got total of 5 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2319.756290] env[61356]: DEBUG oslo_concurrency.lockutils [None req-03d2ca75-aea7-4864-b121-0c1f729c10cd tempest-ServerActionsTestJSON-1466029563 tempest-ServerActionsTestJSON-1466029563-project-member] Acquiring lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.785872] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.786059] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.796414] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.796624] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.796793] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2319.796950] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61356) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2319.798009] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d4268f-66ad-466b-9ded-d4450db6d21e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.806724] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde0e663-e18b-42e0-9b89-28500728f049 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.820636] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4570ca79-0f6a-4f2d-aa46-655deb0dd0fb {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.826783] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a88988-52ec-4849-ad01-5fb1d4e384e5 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.856178] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180679MB free_disk=107GB free_vcpus=48 pci_devices=None {{(pid=61356) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2319.856670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2319.856670] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2319.911562] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2319.911722] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 887b020c-3652-4724-a0d2-d7413325c5c1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2319.911877] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2319.912062] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance f2627e8b-2c0b-4cba-9920-98ac3686c919 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2319.912140] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Instance 0f222fb6-d822-4d36-80be-8072497dd691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61356) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2319.912307] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2319.912438] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=110GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61356) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2319.980805] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1721c97-ae27-48ee-87ec-b93d4e47d99a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.988747] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41186c02-e42a-46c1-ac64-90de89ea9ca6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.021370] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75825205-451a-48b5-b474-f433cecbf14a {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.028850] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf255e18-824f-4d0b-923f-0fe868feb62d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2320.042790] env[61356]: DEBUG nova.compute.provider_tree [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2320.052489] env[61356]: DEBUG nova.scheduler.client.report [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2320.065648] env[61356]: DEBUG nova.compute.resource_tracker [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61356) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2320.065829] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.209s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2323.037601] env[61356]: DEBUG oslo_service.periodic_task [None 
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2323.758558] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.759137] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.759137] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.759562] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.759562] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61356) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10570}} [ 2327.760223] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2327.760531] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Starting heal instance info cache {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9951}} [ 2327.760531] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Rebuilding the list of instances to heal {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9955}} [ 2327.775260] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2327.775418] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2327.775540] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c] Skipping network cache update for instance because it is Building. 
{{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2327.775661] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: f2627e8b-2c0b-4cba-9920-98ac3686c919] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2327.775785] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 0f222fb6-d822-4d36-80be-8072497dd691] Skipping network cache update for instance because it is Building. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9964}} [ 2327.775906] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Didn't find any instances for network info cache update. {{(pid=61356) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10037}} [ 2333.759455] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.759768] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11251}} [ 2333.768905] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] There are 0 instances to clean {{(pid=61356) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11260}} [ 2335.763780] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.758983] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2340.759399] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Cleaning up deleted instances with incomplete migration {{(pid=61356) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11289}} [ 2341.493803] env[61356]: DEBUG oslo_concurrency.lockutils [None req-d15ae654-013d-442a-a51f-318583becb54 tempest-ServerDiskConfigTestJSON-660159837 tempest-ServerDiskConfigTestJSON-660159837-project-member] Acquiring lock "f2627e8b-2c0b-4cba-9920-98ac3686c919" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2349.996197] env[61356]: DEBUG oslo_service.periodic_task [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61356) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2350.013833] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Getting list of instances from cluster (obj){ [ 2350.013833] env[61356]: value = "domain-c8" [ 
2350.013833] env[61356]: _type = "ClusterComputeResource" [ 2350.013833] env[61356]: } {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2350.015138] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdba28b-aca7-41e7-911e-54d8a7e47c3b {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2350.027539] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Got total of 5 instances {{(pid=61356) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2350.027714] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 7f078f06-c61b-4215-a95b-25ba3adaf5fd {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2350.027903] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 887b020c-3652-4724-a0d2-d7413325c5c1 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2350.028084] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 55c9db5a-0c7a-4f19-aa8c-1394ac91b70c {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2350.028252] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid f2627e8b-2c0b-4cba-9920-98ac3686c919 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2350.028406] env[61356]: DEBUG nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Triggering sync for uuid 0f222fb6-d822-4d36-80be-8072497dd691 {{(pid=61356) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10361}} [ 2350.028706] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.028962] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "887b020c-3652-4724-a0d2-d7413325c5c1" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.029231] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "55c9db5a-0c7a-4f19-aa8c-1394ac91b70c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.029449] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "f2627e8b-2c0b-4cba-9920-98ac3686c919" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2350.029647] env[61356]: DEBUG oslo_concurrency.lockutils [None
req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Acquiring lock "0f222fb6-d822-4d36-80be-8072497dd691" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.988205] env[61356]: WARNING oslo_vmware.rw_handles [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles response.begin() [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2360.988205] env[61356]: ERROR oslo_vmware.rw_handles [ 2360.988993] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Downloaded image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2360.990673] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Caching image {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2360.990930] env[61356]: DEBUG nova.virt.vmwareapi.vm_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Copying Virtual Disk [datastore2] vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk to [datastore2] vmware_temp/293123ea-c1cd-452a-b92f-428c0377e5e3/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk {{(pid=61356) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2360.991271] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-370d0b79-51a6-468b-add0-3c82a4107cf6 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2360.999559] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Waiting for the task: (returnval){ [ 2360.999559] env[61356]: value = "task-4302702" [ 2360.999559] env[61356]: _type = "Task" [ 2360.999559] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.008488] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Task: {'id': task-4302702, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.510213] env[61356]: DEBUG oslo_vmware.exceptions [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Fault InvalidArgument not matched. {{(pid=61356) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2361.510458] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Releasing lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2361.511107] env[61356]: ERROR nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2361.511107] env[61356]: Faults: ['InvalidArgument'] [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Traceback (most recent call last): [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] yield resources [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self.driver.spawn(context, instance, image_meta, [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self._fetch_image_if_missing(context, vi) [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 
7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] image_cache(vi, tmp_image_ds_loc) [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] vm_util.copy_virtual_disk( [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] session._wait_for_task(vmdk_copy_task) [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] return self.wait_for_task(task_ref) [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] return evt.wait() [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] result = hub.switch() [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] return self.greenlet.switch() [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self.f(*self.args, **self.kw) [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] raise exceptions.translate_fault(task_info.error) [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Faults: ['InvalidArgument'] [ 2361.511107] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] [ 2361.512650] env[61356]: INFO nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] 
[instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Terminating instance [ 2361.513088] env[61356]: DEBUG oslo_concurrency.lockutils [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Acquired lock "[datastore2] devstack-image-cache_base/88d3b9c3-6614-40fd-b1c9-50fb806e539a/88d3b9c3-6614-40fd-b1c9-50fb806e539a.vmdk" {{(pid=61356) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2361.513300] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2361.513561] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e7a7886-a3cb-433a-bfc3-3697dc9d1011 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.515902] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Start destroying the instance on the hypervisor. {{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2361.516108] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2361.516819] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc198a1-edb2-43fe-ac6c-fc567b953b52 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.523220] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Unregistering the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2361.523429] env[61356]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a27196b1-4495-4ccc-bf94-888c8c7fd3aa {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.525533] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2361.525707] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61356) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2361.526627] env[61356]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f11c6af-e273-4c8b-b674-2eaa02c5b22d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.531263] env[61356]: DEBUG oslo_vmware.api [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Waiting for the task: (returnval){ [ 2361.531263] env[61356]: value = "session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a4ab44-bacf-4cfd-465d-b76e7f3969ab" [ 2361.531263] env[61356]: _type = "Task" [ 2361.531263] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.537933] env[61356]: DEBUG oslo_vmware.api [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Task: {'id': session[52549dbf-ee5e-26e9-a39d-d48195a9e09b]52a4ab44-bacf-4cfd-465d-b76e7f3969ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.593258] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Unregistered the VM {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2361.593514] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Deleting contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2361.593691] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Deleting the datastore file [datastore2] 7f078f06-c61b-4215-a95b-25ba3adaf5fd {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2361.593927] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3875b93-c63a-4ab3-86cc-043c00b73d44 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.600305] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Waiting for the task: (returnval){ [ 2361.600305] env[61356]: value = "task-4302704" [ 2361.600305] env[61356]: _type = "Task" [ 2361.600305] env[61356]: } to complete. {{(pid=61356) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.607554] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Task: {'id': task-4302704, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.041553] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Preparing fetch location {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2362.041925] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating directory with path [datastore2] vmware_temp/a85527b2-34f4-47cb-b70b-b816af7eda4d/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2362.042039] env[61356]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2ebe002-31ce-4fc3-af0f-47e5fedffc8d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.052488] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Created directory with path [datastore2] vmware_temp/a85527b2-34f4-47cb-b70b-b816af7eda4d/88d3b9c3-6614-40fd-b1c9-50fb806e539a {{(pid=61356) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2362.052681] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Fetch image to [datastore2] vmware_temp/a85527b2-34f4-47cb-b70b-b816af7eda4d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk {{(pid=61356) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2362.052850] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to [datastore2] vmware_temp/a85527b2-34f4-47cb-b70b-b816af7eda4d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk on the data store datastore2 {{(pid=61356) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2362.053558] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3df2c9-dd9f-4706-ba40-4da1278b5e15 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.059886] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d767eb-5568-40d8-a87c-07eb68f6d922 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.068682] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aceb2c2-af0d-491e-85a0-e12cba5a8eb0 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.098471] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845aa042-0372-4795-b199-e57a5a69d527 {{(pid=61356) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.109541] env[61356]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-36484718-07f6-4106-95cf-061aec8e74cd {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.111112] env[61356]: DEBUG oslo_vmware.api [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Task: {'id': task-4302704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074007} completed successfully. {{(pid=61356) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2362.111380] env[61356]: DEBUG nova.virt.vmwareapi.ds_util [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Deleted the datastore file {{(pid=61356) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2362.111564] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Deleted contents of the VM from datastore datastore2 {{(pid=61356) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2362.111729] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2362.111897] env[61356]: INFO nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Took 0.60 seconds to destroy the instance on the hypervisor. 
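
The DeleteDatastoreFile_Task/wait_for_task exchange above is oslo.vmware's standard task round-trip: invoke a *_Task method, get a Task moref back, and poll its TaskInfo until it completes or faults (the "progress is 0%" and "completed successfully" lines). A minimal sketch of the same pattern against the public oslo_vmware.api interface; the endpoint, credentials, and datastore path are placeholders, not values from this log:

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   10,    # api_retry_count
                                   0.5)   # task_poll_interval, seconds

    # Any Datacenter moref satisfies the datacenter argument.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 1)
    dc_ref = result.objects[0].obj

    # FileManager.DeleteDatastoreFile_Task returns a Task moref;
    # wait_for_task() polls it until success or a translated fault.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              session.vim.service_content.fileManager,
                              name='[datastore2] some/stale/file.vmdk',
                              datacenter=dc_ref)
    session.wait_for_task(task)
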
[ 2362.114079] env[61356]: DEBUG nova.compute.claims [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Aborting claim: {{(pid=61356) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2362.114262] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.114473] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.137457] env[61356]: DEBUG nova.virt.vmwareapi.images [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] [instance: 887b020c-3652-4724-a0d2-d7413325c5c1] Downloading image file data 88d3b9c3-6614-40fd-b1c9-50fb806e539a to the data store datastore2 {{(pid=61356) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2362.188791] env[61356]: DEBUG oslo_vmware.rw_handles [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a85527b2-34f4-47cb-b70b-b816af7eda4d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61356) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2362.248501] env[61356]: DEBUG oslo_vmware.rw_handles [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Completed reading data from the image iterator. {{(pid=61356) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2362.248703] env[61356]: DEBUG oslo_vmware.rw_handles [None req-3d907b7a-7a35-4891-8c78-81c2023e6163 tempest-ServersTestJSON-1488391771 tempest-ServersTestJSON-1488391771-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a85527b2-34f4-47cb-b70b-b816af7eda4d/88d3b9c3-6614-40fd-b1c9-50fb806e539a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61356) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2362.281912] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7439ea-5d35-4e18-87cd-b6bd382a30a8 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.289405] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964b5e8b-9c6b-483d-bc93-2ffc6cc7590d {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.318274] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0d1f09-b049-4540-95c1-f2e3d435d40e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.324978] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815c8a69-8e48-4a82-927a-3bf73036127c {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.337307] env[61356]: DEBUG nova.compute.provider_tree [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Inventory has not changed in ProviderTree for provider: cfb833eb-84f8-480d-866a-1ad2fb871484 {{(pid=61356) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2362.347035] env[61356]: DEBUG nova.scheduler.client.report [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Inventory has not changed for provider cfb833eb-84f8-480d-866a-1ad2fb871484 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 210, 'reserved': 0, 'min_unit': 1, 'max_unit': 107, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61356) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2362.359599] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.245s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.360132] env[61356]: ERROR nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.360132] env[61356]: Faults: ['InvalidArgument'] [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Traceback (most recent call last): [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2362.360132] env[61356]: ERROR 
nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self.driver.spawn(context, instance, image_meta, [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self._fetch_image_if_missing(context, vi) [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] image_cache(vi, tmp_image_ds_loc) [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] vm_util.copy_virtual_disk( [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] session._wait_for_task(vmdk_copy_task) [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] return self.wait_for_task(task_ref) [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] return evt.wait() [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] result = hub.switch() [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] return self.greenlet.switch() [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] self.f(*self.args, **self.kw) [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] raise exceptions.translate_fault(task_info.error) [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Faults: ['InvalidArgument'] [ 2362.360132] env[61356]: ERROR nova.compute.manager [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] [ 2362.361032] env[61356]: DEBUG nova.compute.utils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] VimFaultException {{(pid=61356) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2362.362319] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Build of instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd was re-scheduled: A specified parameter was not correct: fileType [ 2362.362319] env[61356]: Faults: ['InvalidArgument'] {{(pid=61356) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2362.362684] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Unplugging VIFs for instance {{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2362.362857] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61356) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2362.363037] env[61356]: DEBUG nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2362.363206] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2362.531739] env[61356]: DEBUG oslo_concurrency.lockutils [None req-737313d5-9b72-4b90-b9aa-71628c088879 tempest-DeleteServersTestJSON-1040658938 tempest-DeleteServersTestJSON-1040658938-project-member] Acquiring lock "0f222fb6-d822-4d36-80be-8072497dd691" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.742033] env[61356]: DEBUG nova.network.neutron [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2362.756644] env[61356]: INFO nova.compute.manager [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Took 0.39 seconds to deallocate network for instance. 
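
The "Fault InvalidArgument not matched" line earlier shows oslo.vmware failing to map the task fault name onto a specific exception class, so the error surfaces as a generic VimFaultException whose fault_list carries the raw vSphere fault names; that is what _build_and_run_instance catches before aborting the claim and rescheduling. A sketch of how a caller can inspect it; the helper name is illustrative, not Nova's:

    from oslo_vmware import exceptions as vexc

    def wait_checked(session, copy_task):
        """Wait for a CopyVirtualDisk_Task, surfacing InvalidArgument."""
        try:
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as exc:
            # exc.fault_list holds the raw fault names from TaskInfo,
            # e.g. ['InvalidArgument'] for the fileType error above.
            if 'InvalidArgument' in exc.fault_list:
                pass  # e.g. correct the disk spec and retry
            raise
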
[ 2362.853529] env[61356]: INFO nova.scheduler.client.report [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Deleted allocations for instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd [ 2362.870586] env[61356]: DEBUG oslo_concurrency.lockutils [None req-c88a26c0-cfec-46bc-b9e4-7bb2c914a85f tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 566.830s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.870586] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 371.449s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.870586] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Acquiring lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.870867] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.871061] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.872935] env[61356]: INFO nova.compute.manager [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Terminating instance [ 2362.874699] env[61356]: DEBUG nova.compute.manager [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Start destroying the instance on the hypervisor. 
{{(pid=61356) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2362.874782] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Destroying instance {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2362.875229] env[61356]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52b34943-9581-43ed-bbfb-a685e2be96d1 {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.884240] env[61356]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cb4c7b-42ba-4779-bdbf-68d462fa170e {{(pid=61356) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.909905] env[61356]: WARNING nova.virt.vmwareapi.vmops [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7f078f06-c61b-4215-a95b-25ba3adaf5fd could not be found. [ 2362.910127] env[61356]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Instance destroyed {{(pid=61356) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2362.910307] env[61356]: INFO nova.compute.manager [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2362.910549] env[61356]: DEBUG oslo.service.loopingcall [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61356) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2362.910762] env[61356]: DEBUG nova.compute.manager [-] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Deallocating network for instance {{(pid=61356) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2362.910858] env[61356]: DEBUG nova.network.neutron [-] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] deallocate_for_instance() {{(pid=61356) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2362.932829] env[61356]: DEBUG nova.network.neutron [-] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Updating instance_info_cache with network_info: [] {{(pid=61356) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2362.941305] env[61356]: INFO nova.compute.manager [-] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] Took 0.03 seconds to deallocate network for instance. 
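
The lock lines throughout this log come from oslo.concurrency: the "Acquiring lock X by Y" / "acquired ... waited" / '"released" ... held' triples are logged by the inner wrapper of the lockutils.synchronized decorator, while the plain "Acquired lock" / "Releasing lock" pairs on the image-cache path come from the lockutils.lock context manager. A minimal sketch of both forms; the lock names and bodies are illustrative:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with the named lock held; wait and hold times are
        # logged on entry and exit, as in the lines above.
        pass

    # Per-instance serialization keyed on the UUID, context-manager form:
    with lockutils.lock('7f078f06-c61b-4215-a95b-25ba3adaf5fd'):
        pass
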
[ 2363.025045] env[61356]: DEBUG oslo_concurrency.lockutils [None req-8f2eca81-fcb3-45d8-a707-e15dbf38f932 tempest-ServerMetadataTestJSON-63803637 tempest-ServerMetadataTestJSON-63803637-project-member] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.155s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.025846] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 12.997s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.026047] env[61356]: INFO nova.compute.manager [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] [instance: 7f078f06-c61b-4215-a95b-25ba3adaf5fd] During sync_power_state the instance has a pending task (deleting). Skip. [ 2363.026228] env[61356]: DEBUG oslo_concurrency.lockutils [None req-7569aba8-033d-4e88-8946-008c8ab1c855 None None] Lock "7f078f06-c61b-4215-a95b-25ba3adaf5fd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61356) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
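
Every "Running periodic task ComputeManager._..." line in this log is emitted by oslo.service's periodic-task loop, which Nova's ComputeManager builds on. A minimal sketch of the pattern; the spacing value is illustrative, since Nova's real intervals are configuration-driven:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self, conf):
            super().__init__(conf)

        @periodic_task.periodic_task(spacing=600)
        def _sync_power_states(self, context):
            # Compare driver power state with the DB record; instances
            # with a pending task are skipped, as the "Skip." line shows.
            pass

    mgr = Manager(cfg.CONF)
    mgr.run_periodic_tasks(context=None)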