[ 529.663755] env[66958]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 530.289576] env[67008]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 531.639195] env[67008]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=67008) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 531.639559] env[67008]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=67008) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 531.639675] env[67008]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=67008) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 531.639940] env[67008]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 531.836513] env[67008]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=67008) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:384}}
[ 531.846779] env[67008]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=67008) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:422}}
[ 531.950639] env[67008]: INFO nova.virt.driver [None req-bf452c47-4677-4ebc-8456-9a4b8f6f2f22 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 532.024225] env[67008]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 532.024372] env[67008]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 532.024476] env[67008]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=67008) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 534.933354] env[67008]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-8fb067c8-0907-48b1-845d-25c1d70c26cd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.950462] env[67008]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=67008) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 534.950643] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-93cbb7f1-85ed-4487-b391-bb62baea993d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 534.984176] env[67008]: INFO oslo_vmware.api [-] Successfully established new session; session ID is b3ba5.
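The pair of warnings above is about import ordering: eventlet.monkey_patch() rewrites stdlib modules such as socket and ssl in place, so a library like urllib3 that is imported first can keep references to the unpatched versions. A minimal sketch of the ordering the warning asks for (only eventlet and urllib3 are taken from the log; the rest is illustrative):

    # Patch the stdlib before importing anything that touches sockets.
    import eventlet
    eventlet.monkey_patch()

    # urllib3 now binds to the already-green socket/ssl modules.
    import urllib3  # noqa: E402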
[ 534.984310] env[67008]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.960s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 534.984864] env[67008]: INFO nova.virt.vmwareapi.driver [None req-bf452c47-4677-4ebc-8456-9a4b8f6f2f22 None None] VMware vCenter version: 7.0.3
[ 534.988586] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738bca30-3a3c-4c78-8e15-a575058b5d7e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.005875] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1cfa97-309c-44a6-b01a-d6cc6cc3ae11 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.011751] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e82eab-e9a8-4a80-a7da-16e47b6f6a17 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.018218] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e576b17-e401-4a5b-a0f7-f94e7120f41e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.031132] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f57ba1-3f8f-4e83-8ec5-4764398d5a3a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.037042] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37aaed7-fd0e-4658-b098-87f96350207f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.067811] env[67008]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-bef71ce1-97b7-4e66-b473-627a0d497022 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.072590] env[67008]: DEBUG nova.virt.vmwareapi.driver [None req-bf452c47-4677-4ebc-8456-9a4b8f6f2f22 None None] Extension org.openstack.compute already exists. {{(pid=67008) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 535.075207] env[67008]: INFO nova.compute.provider_config [None req-bf452c47-4677-4ebc-8456-9a4b8f6f2f22 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 535.094672] env[67008]: DEBUG nova.context [None req-bf452c47-4677-4ebc-8456-9a4b8f6f2f22 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),09c19ea7-b545-49ed-aa6e-9b87508d20cc(cell1) {{(pid=67008) load_cells /opt/stack/nova/nova/context.py:464}}
[ 535.096614] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 535.096836] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 535.097543] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 535.097952] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Acquiring lock "09c19ea7-b545-49ed-aa6e-9b87508d20cc" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 535.098156] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Lock "09c19ea7-b545-49ed-aa6e-9b87508d20cc" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 535.099122] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Lock "09c19ea7-b545-49ed-aa6e-9b87508d20cc" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 535.123621] env[67008]: INFO dbcounter [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Registered counter for database nova_cell0
[ 535.131843] env[67008]: INFO dbcounter [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Registered counter for database nova_cell1
[ 535.135112] env[67008]: DEBUG oslo_db.sqlalchemy.engines [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=67008) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 535.135538] env[67008]: DEBUG oslo_db.sqlalchemy.engines [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=67008) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
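The Acquiring/acquired/released bookkeeping above is oslo.concurrency's lockutils at work: set_target_cell guards its per-cell connection cache with one named lock per cell UUID. A minimal sketch of the same pattern (the lock name and function body are illustrative):

    from oslo_concurrency import lockutils

    # Callers serialize on the named lock; the library logs how long each
    # caller waited for the lock and how long it held it, as seen above.
    @lockutils.synchronized('09c19ea7-b545-49ed-aa6e-9b87508d20cc')
    def get_or_set_cached_cell_and_set_connections():
        ...  # build or reuse the DB connection for this cell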
[ 535.140343] env[67008]: DEBUG dbcounter [-] [67008] Writer thread running {{(pid=67008) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 535.141184] env[67008]: DEBUG dbcounter [-] [67008] Writer thread running {{(pid=67008) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 535.143217] env[67008]: ERROR nova.db.main.api [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 535.143217] env[67008]: result = function(*args, **kwargs)
[ 535.143217] env[67008]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 535.143217] env[67008]: return func(*args, **kwargs)
[ 535.143217] env[67008]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 535.143217] env[67008]: result = fn(*args, **kwargs)
[ 535.143217] env[67008]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 535.143217] env[67008]: return f(*args, **kwargs)
[ 535.143217] env[67008]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 535.143217] env[67008]: return db.service_get_minimum_version(context, binaries)
[ 535.143217] env[67008]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 535.143217] env[67008]: _check_db_access()
[ 535.143217] env[67008]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 535.143217] env[67008]: stacktrace = ''.join(traceback.format_stack())
[ 535.143217] env[67008]:
[ 535.145132] env[67008]: ERROR nova.db.main.api [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 535.145132] env[67008]: result = function(*args, **kwargs)
[ 535.145132] env[67008]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 535.145132] env[67008]: return func(*args, **kwargs)
[ 535.145132] env[67008]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 535.145132] env[67008]: result = fn(*args, **kwargs)
[ 535.145132] env[67008]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 535.145132] env[67008]: return f(*args, **kwargs)
[ 535.145132] env[67008]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 535.145132] env[67008]: return db.service_get_minimum_version(context, binaries)
[ 535.145132] env[67008]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 535.145132] env[67008]: _check_db_access()
[ 535.145132] env[67008]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 535.145132] env[67008]: stacktrace = ''.join(traceback.format_stack())
[ 535.145132] env[67008]:
[ 535.145132] env[67008]: WARNING nova.objects.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 535.145132] env[67008]: WARNING nova.objects.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Failed to get minimum service version for cell 09c19ea7-b545-49ed-aa6e-9b87508d20cc
[ 535.145863] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Acquiring lock "singleton_lock" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
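The two ERROR blocks above are deliberate, not crashes: nova-compute is configured to refuse direct database access, and the minimum-service-version lookup trips that guard once per cell (hence the two WARNING fallbacks). A minimal sketch of how such a guard can work (names follow the traceback; the body is an assumption, not Nova's exact code):

    import traceback

    DISABLE_DB_ACCESS = True  # assumed flag, flipped on in nova-compute processes

    class DBNotAllowed(Exception):
        pass

    def _check_db_access():
        if DISABLE_DB_ACCESS:
            # Dump the full call stack so the offending caller is visible
            # in the log, then refuse the query outright.
            stacktrace = ''.join(traceback.format_stack())
            print('No DB access allowed in nova-compute: %s' % stacktrace)
            raise DBNotAllowed()

Every wrapped DB API call runs _check_db_access() first, which is why the logged stack ends at traceback.format_stack() rather than in any database code.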
[ 535.145863] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Acquired lock "singleton_lock" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 535.145863] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Releasing lock "singleton_lock" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 535.146031] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Full set of CONF: {{(pid=67008) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:362}}
[ 535.146179] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ******************************************************************************** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2589}}
[ 535.146307] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] Configuration options gathered from: {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2590}}
[ 535.146442] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2591}}
[ 535.146628] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2592}}
[ 535.146754] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ================================================================================ {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2594}}
[ 535.146959] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] allow_resize_to_same_host = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.147142] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] arq_binding_timeout = 300 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.147272] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] backdoor_port = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.147398] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] backdoor_socket = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.147559] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] block_device_allocate_retries = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.147721] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] block_device_allocate_retries_interval = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.147890] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cert = self.pem {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.148081] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.148242] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute_monitors = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.148410] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] config_dir = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.148577] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] config_drive_format = iso9660 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.148709] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.148870] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] config_source = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.149046] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] console_host = devstack {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.149212] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] control_exchange = nova {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.149369] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cpu_allocation_ratio = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.149527] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] daemon = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.149732] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] debug = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.149905] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] default_access_ip_network_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.150087] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] default_availability_zone = nova {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.150247] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] default_ephemeral_format = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.150431] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] default_green_pool_size = 1000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.150674] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.150838] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] default_schedule_zone = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.150994] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] disk_allocation_ratio = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.151166] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] enable_new_services = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.151341] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] enabled_apis = ['osapi_compute'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.151504] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] enabled_ssl_apis = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.151665] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] flat_injected = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.151826] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] force_config_drive = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.151984] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] force_raw_images = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.152165] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] graceful_shutdown_timeout = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.152350] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] heal_instance_info_cache_interval = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.152579] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] host = cpu-1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.152755] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] initial_cpu_allocation_ratio = 4.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.152918] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] initial_disk_allocation_ratio = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.153099] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] initial_ram_allocation_ratio = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.153330] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.153507] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_build_timeout = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.153672] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_delete_interval = 300 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.153836] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_format = [instance: %(uuid)s] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.154011] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_name_template = instance-%08x {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.154182] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_usage_audit = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.154351] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_usage_audit_period = month {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.154520] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.154712] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] instances_path = /opt/stack/data/nova/instances {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.154890] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] internal_service_availability_zone = internal {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.155059] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] key = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.155233] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] live_migration_retry_count = 30 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.155472] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_config_append = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.155662] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.155826] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_dir = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.155985] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.156128] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_options = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.156292] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_rotate_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.156463] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_rotate_interval_type = days {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.156661] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] log_rotation_type = none {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.156797] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.156926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.157110] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.157280] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.157411] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.157574] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] long_rpc_timeout = 1800 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.157733] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] max_concurrent_builds = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.157891] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] max_concurrent_live_migrations = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.158081] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] max_concurrent_snapshots = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.158216] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] max_local_block_devices = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.158367] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] max_logfile_count = 30 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.158524] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] max_logfile_size_mb = 200 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.158681] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] maximum_instance_delete_attempts = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.158846] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metadata_listen = 0.0.0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.159015] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metadata_listen_port = 8775 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.159186] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metadata_workers = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.159346] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] migrate_max_retries = -1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.159509] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] mkisofs_cmd = genisoimage {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.159712] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] my_block_storage_ip = 10.180.1.21 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.159844] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] my_ip = 10.180.1.21 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.160009] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] network_allocate_retries = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.160194] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.160379] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] osapi_compute_listen = 0.0.0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.160551] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] osapi_compute_listen_port = 8774 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.160718] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] osapi_compute_unique_server_name_scope = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.160884] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] osapi_compute_workers = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.161054] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] password_length = 12 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.161221] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] periodic_enable = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.161391] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] periodic_fuzzy_delay = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.161560] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] pointer_model = usbtablet {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.161726] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] preallocate_images = none {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.161883] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] publish_errors = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.162023] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] pybasedir = /opt/stack/nova {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.162182] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ram_allocation_ratio = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.162363] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rate_limit_burst = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.162542] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rate_limit_except_level = CRITICAL {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.162704] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rate_limit_interval = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.162864] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reboot_timeout = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.163041] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reclaim_instance_interval = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.163204] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] record = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.163372] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reimage_timeout_per_gb = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.163536] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] report_interval = 120 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.163694] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rescue_timeout = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.163851] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reserved_host_cpus = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.164015] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reserved_host_disk_mb = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.164178] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reserved_host_memory_mb = 512 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.164339] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] reserved_huge_pages = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.164501] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] resize_confirm_window = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.164681] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] resize_fs_using_block_device = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.164851] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] resume_guests_state_on_host_boot = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.165033] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.165199] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rpc_response_timeout = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.165360] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] run_external_periodic_tasks = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.165528] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] running_deleted_instance_action = reap {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.165688] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] running_deleted_instance_poll_interval = 1800 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.165845] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] running_deleted_instance_timeout = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.166011] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler_instance_sync_interval = 120 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.166182] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_down_time = 720 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.166348] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] servicegroup_driver = db {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.166508] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] shelved_offload_time = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.166669] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] shelved_poll_interval = 3600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.166837] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] shutdown_timeout = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.167007] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] source_is_ipv6 = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.167174] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ssl_only = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.167423] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.167595] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] sync_power_state_interval = 600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.167756] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] sync_power_state_pool_size = 1000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.167924] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] syslog_log_facility = LOG_USER {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.168101] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] tempdir = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.168269] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] timeout_nbd = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.168437] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] transport_url = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.168598] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] update_resources_interval = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.168760] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_cow_images = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.168922] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_eventlog = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.169116] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_journal = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.169285] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_json = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.169446] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_rootwrap_daemon = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.169603] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_stderr = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.169774] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] use_syslog = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.169915] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vcpu_pin_set = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.170093] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plugging_is_fatal = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.170270] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plugging_timeout = 300 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.170479] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] virt_mkfs = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.170653] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] volume_usage_poll_interval = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.170834] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] watch_log_file = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.171013] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] web = /usr/share/spice-html5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 535.171212] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_concurrency.disable_process_locking = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.171516] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.171702] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.171869] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.172050] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_metrics.metrics_process_name = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.172224] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.172436] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.172633] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.auth_strategy = keystone {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.172804] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.compute_link_prefix = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.172976] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.173162] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.dhcp_domain = novalocal {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.173339] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.enable_instance_password = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.173500] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.glance_link_prefix = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.173664] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.173833] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.instance_list_cells_batch_strategy = distributed {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.173994] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.instance_list_per_project_cells = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.174168] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.list_records_by_skipping_down_cells = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.174332] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.local_metadata_per_cell = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.174498] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.max_limit = 1000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.174682] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.metadata_cache_expiration = 15 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.174868] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.neutron_default_tenant_id = default {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.175047] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.use_forwarded_for = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.175217] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.use_neutron_default_nets = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.175386] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.175644] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_dynamic_failure_fatal = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.175712] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.175878] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_dynamic_ssl_certfile = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.176061] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_dynamic_targets = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.176227] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_jsonfile_path = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.176407] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api.vendordata_providers = ['StaticJSON'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
api.vendordata_providers = ['StaticJSON'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.176597] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.backend = dogpile.cache.memcached {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.176762] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.backend_argument = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.176930] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.config_prefix = cache.oslo {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.177114] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.dead_timeout = 60.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.177280] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.debug_cache_backend = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.177444] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.enable_retry_client = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.177897] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.enable_socket_keepalive = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.177897] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.enabled = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178054] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.expiration_time = 600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178105] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.hashclient_retry_attempts = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178259] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.hashclient_retry_delay = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178422] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_dead_retry = 300 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178592] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_password = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178754] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.178915] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.179086] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_pool_maxsize = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.179244] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_pool_unused_timeout = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.179401] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_sasl_enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.179577] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_servers = ['localhost:11211'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.179741] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_socket_timeout = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.179911] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.memcache_username = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.180086] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.proxies = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.180256] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.retry_attempts = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.180451] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.retry_delay = 0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.180622] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.socket_keepalive_count = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.180784] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.socket_keepalive_idle = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.180943] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.socket_keepalive_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.181116] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.tls_allowed_ciphers = None {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.181278] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.tls_cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.181436] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.tls_certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.181595] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.tls_enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.181752] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cache.tls_keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.181919] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.182103] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.auth_type = password {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.182291] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.182474] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.catalog_info = volumev3::publicURL {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.182638] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.182806] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.182971] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.cross_az_attach = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.183152] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.debug = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.183322] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.endpoint_template = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.183507] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.http_retries = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.183646] env[67008]: DEBUG oslo_service.service [None 
req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.183805] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.183976] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.os_region_name = RegionOne {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.184156] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.184321] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cinder.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.184497] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.184674] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.cpu_dedicated_set = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.184843] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.cpu_shared_set = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.185038] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.image_type_exclude_list = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.185191] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.live_migration_wait_for_vif_plug = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.185354] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.max_concurrent_disk_ops = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.185520] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.max_disk_devices_to_attach = -1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.185851] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.185851] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.186017] env[67008]: DEBUG oslo_service.service 
[None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.resource_provider_association_refresh = 300 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.186183] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.shutdown_retry_interval = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.186365] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.186543] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] conductor.workers = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.186718] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] console.allowed_origins = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.186880] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] console.ssl_ciphers = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.187062] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] console.ssl_minimum_version = default {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.187241] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] consoleauth.token_ttl = 600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.187414] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.187575] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.187743] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.187910] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.188074] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.188236] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.188398] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] 
cyborg.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.188560] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.188721] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.188879] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.189047] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.region_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.189212] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.189381] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.service_type = accelerator {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.189546] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.189702] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.189860] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.190023] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.190207] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.190395] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] cyborg.version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.190592] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.backend = sqlalchemy {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.190771] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.connection = **** {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.190940] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.connection_debug = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.191131] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.connection_parameters = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.191299] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.connection_recycle_time = 3600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.191472] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.connection_trace = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.191634] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.db_inc_retry_interval = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.191795] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.db_max_retries = 20 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.191955] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.db_max_retry_interval = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.192129] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.db_retry_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.192318] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.max_overflow = 50 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.192518] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.max_pool_size = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.192699] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.max_retries = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.192865] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.mysql_sql_mode = TRADITIONAL {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.193054] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.mysql_wsrep_sync_wait = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196533] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.pool_timeout = None {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196533] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.retry_interval = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196533] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.slave_connection = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196533] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.sqlite_synchronous = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196533] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] database.use_db_reconnect = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196533] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.backend = sqlalchemy {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196744] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.connection = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196744] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.connection_debug = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196744] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.connection_parameters = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196744] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.connection_recycle_time = 3600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196744] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.connection_trace = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196744] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.db_inc_retry_interval = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.db_max_retries = 20 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.db_max_retry_interval = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.db_retry_interval = 1 {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.max_overflow = 50 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.max_pool_size = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.196926] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.max_retries = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.mysql_wsrep_sync_wait = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.pool_timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.retry_interval = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.slave_connection = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] api_database.sqlite_synchronous = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197300] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] devices.enabled_mdev_types = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197416] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197580] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ephemeral_storage_encryption.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197752] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ephemeral_storage_encryption.key_size = 512 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.197923] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.api_servers = None {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.198099] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.198267] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.198449] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.198622] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.198789] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.198952] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.debug = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.199133] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.default_trusted_certificate_ids = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.199297] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.enable_certificate_validation = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.199461] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.enable_rbd_download = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.199620] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.199784] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.199942] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.200115] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.200283] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.200563] env[67008]: DEBUG 
oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.num_retries = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.200768] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.rbd_ceph_conf = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.200937] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.rbd_connect_timeout = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.201124] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.rbd_pool = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.201298] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.rbd_user = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.201460] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.region_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.201618] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.201784] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.service_type = image {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.201945] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.202115] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.202314] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.202464] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.202649] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.202889] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.verify_glance_signatures = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.202979] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] glance.version = None {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.203154] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] guestfs.debug = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.203368] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.config_drive_cdrom = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.203527] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.config_drive_inject_password = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.203697] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.dynamic_memory_ratio = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.203861] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.enable_instance_metrics_collection = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.204035] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.enable_remotefx = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.204211] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.instances_path_share = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.204377] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.iscsi_initiator_list = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.204539] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.limit_cpu_features = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.204703] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.mounted_disk_query_retry_count = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.204864] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.mounted_disk_query_retry_interval = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.205039] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.power_state_check_timeframe = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.205217] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.power_state_event_polling_interval = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.205388] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.qemu_img_cmd = qemu-img.exe {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.205551] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.use_multipath_io = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.205713] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.volume_attach_retry_count = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.205873] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.volume_attach_retry_interval = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.206062] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.vswitch_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.206202] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] hyperv.wait_soft_reboot_seconds = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.206368] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] mks.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.206736] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.206927] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] image_cache.manager_interval = 2400 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.207109] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] image_cache.precache_concurrency = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.207283] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] image_cache.remove_unused_base_images = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.207455] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.207622] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.207801] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] image_cache.subdirectory_name = _base {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.207976] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.api_max_retries 
= 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.208156] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.api_retry_interval = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.208321] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.208485] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.auth_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.208645] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.208807] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.208969] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.209145] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.conductor_group = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.209306] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.209469] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.209627] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.209789] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.209949] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.210120] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.210279] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.210444] env[67008]: DEBUG 
oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.peer_list = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.210602] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.region_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.210765] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.serial_console_state_timeout = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.210924] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.211113] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.service_type = baremetal {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.211282] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.211441] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.211602] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.211763] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.211944] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.212117] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ironic.version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.212318] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.212504] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] key_manager.fixed_key = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.212726] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.212897] env[67008]: DEBUG oslo_service.service [None 
req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.barbican_api_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.213077] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.barbican_endpoint = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.213254] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.barbican_endpoint_type = public {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.213421] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.barbican_region_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.213578] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.213739] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.213903] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.214077] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.214238] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.214404] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.number_of_retries = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.214565] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.retry_delay = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.214726] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.send_service_user_token = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.214888] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.215054] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.215218] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.verify_ssl = True {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.215396] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican.verify_ssl_path = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.215573] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.215737] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.auth_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.215897] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.216068] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.216229] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.216390] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.216549] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.216711] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.216866] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] barbican_service_user.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.217044] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.approle_role_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.217207] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.approle_secret_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.217365] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.217523] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.certfile = None {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.217685] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.217845] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218007] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218183] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.kv_mountpoint = secret {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218343] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.kv_path = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218507] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.kv_version = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218664] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.namespace = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218862] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.root_token_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.218981] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.219150] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.ssl_ca_crt_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.219309] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.219473] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.use_ssl = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.219637] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.219802] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.219965] env[67008]: DEBUG oslo_service.service [None 
req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.auth_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.220135] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.220294] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.220457] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.220617] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.220776] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.220934] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.221115] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.221277] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.221433] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.221587] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.221741] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.region_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.221896] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.222072] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.service_type = identity {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.222235] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.split_loggers = False {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.222423] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.222593] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.222753] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.222931] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.223103] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] keystone.version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.223346] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.connection_uri = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.223536] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_mode = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.223709] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_model_extra_flags = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.223880] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_models = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.224064] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_power_governor_high = performance {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.224237] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_power_governor_low = powersave {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.224400] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_power_management = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.224571] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.224735] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.device_detach_attempts = 8 {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.224895] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.device_detach_timeout = 20 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.225068] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.disk_cachemodes = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.225229] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.disk_prefix = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.225394] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.enabled_perf_events = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.225560] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.file_backed_memory = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.225727] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.gid_maps = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.225888] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.hw_disk_discard = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.226065] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.hw_machine_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.226244] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_rbd_ceph_conf = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.226403] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.226570] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.226737] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_rbd_glance_store_name = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.226903] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_rbd_pool = rbd {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.227083] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_type = default {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.227247] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.images_volume_group = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.227466] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.inject_key = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.227663] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.inject_partition = -2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.227832] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.inject_password = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.227997] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.iscsi_iface = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.228176] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.iser_use_multipath = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.228343] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_bandwidth = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.228508] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_completion_timeout = 800 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.228669] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_downtime = 500 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.228829] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_downtime_delay = 75 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.228993] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_downtime_steps = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.229174] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_inbound_addr = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.229341] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_permit_auto_converge = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.229509] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_permit_post_copy = False {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.229692] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_scheme = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.229845] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_timeout_action = abort {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.230020] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_tunnelled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.230180] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_uri = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.230346] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.live_migration_with_native_tls = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.230508] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.max_queues = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.230671] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.mem_stats_period_seconds = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.230831] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.nfs_mount_options = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.231166] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.231344] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.num_aoe_discover_tries = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.231511] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.num_iser_scan_tries = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.231675] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.num_memory_encrypted_guests = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.231840] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.num_nvme_discover_tries = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.232015] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.num_pcie_ports = 0 
{{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.232187] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.num_volume_scan_tries = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.232384] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.pmem_namespaces = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.232551] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.quobyte_client_cfg = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.232842] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.233024] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rbd_connect_timeout = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.233194] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.233398] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.233570] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rbd_secret_uuid = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.233730] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rbd_user = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.233895] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.realtime_scheduler_priority = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.234084] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.remote_filesystem_transport = ssh {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.234252] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rescue_image_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.234412] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rescue_kernel_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.234571] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rescue_ramdisk_id = None {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.234739] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rng_dev_path = /dev/urandom {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.234898] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.rx_queue_size = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.235078] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.smbfs_mount_options = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.235386] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.235586] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.snapshot_compression = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.235754] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.snapshot_image_format = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.235978] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.236164] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.sparse_logical_volumes = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.236328] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.swtpm_enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.236495] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.swtpm_group = tss {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.236666] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.swtpm_user = tss {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.236837] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.sysinfo_serial = unique {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.236996] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.tb_cache_size = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.237168] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.tx_queue_size = None {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.237334] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.uid_maps = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.237498] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.use_virtio_for_bridges = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.237665] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.virt_type = kvm {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.237835] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.volume_clear = zero {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.237997] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.volume_clear_size = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.238175] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.volume_use_multipath = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.238336] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_cache_path = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.238505] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.238668] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_mount_group = qemu {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.238832] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_mount_opts = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.239011] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.239291] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.239491] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.vzstorage_mount_user = stack {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.239666] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=67008) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.239839] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.240026] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.auth_type = password {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.240188] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.240349] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.240514] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.240673] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.240832] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241008] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.default_floating_pool = public {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241174] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241338] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.extension_sync_interval = 600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241499] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.http_retries = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241659] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241817] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.241974] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.242154] env[67008]: 
DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.metadata_proxy_shared_secret = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.242335] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.242516] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.ovs_bridge = br-int {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.242685] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.physnets = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.242853] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.region_name = RegionOne {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.243031] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.service_metadata_proxy = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.243194] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.243404] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.service_type = network {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.243577] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.243737] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.243896] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.244070] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.244253] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.244412] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] neutron.version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.244584] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None 
None] notifications.bdms_in_notifications = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.244757] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] notifications.default_level = INFO {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.244928] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] notifications.notification_format = unversioned {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.245106] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] notifications.notify_on_state_change = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.245283] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.245465] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] pci.alias = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.245636] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] pci.device_spec = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.245800] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] pci.report_in_placement = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.245971] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.246155] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.auth_type = password {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.246364] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.auth_url = http://10.180.1.21/identity {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.246490] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.246641] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.246802] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.246959] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] 
placement.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.247130] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.247289] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.default_domain_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.247471] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.default_domain_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.247646] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.domain_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.247803] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.domain_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.247959] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.248134] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.248291] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.248446] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.248598] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.248765] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.password = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.248922] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.project_domain_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.249100] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.project_domain_name = Default {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.249269] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.project_id = None {{(pid=67008) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.249442] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.project_name = service {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.249610] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.region_name = RegionOne {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.249771] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.249940] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.service_type = placement {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.250116] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.250277] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.250438] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.250596] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.system_scope = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.250753] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.250916] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.trust_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.251084] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.user_domain_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.251255] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.user_domain_name = Default {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.251432] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.user_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.251626] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.username = placement {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
535.251810] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.251971] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] placement.version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.252162] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.cores = 20 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.252357] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.count_usage_from_placement = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.252538] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.252710] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.injected_file_content_bytes = 10240 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.252878] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.injected_file_path_length = 255 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.253056] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.injected_files = 5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.253227] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.instances = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.253397] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.key_pairs = 100 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.253560] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.metadata_items = 128 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.253725] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.ram = 51200 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.253891] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.recheck_quota = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.254068] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] quota.server_group_members = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.254237] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None 
None] quota.server_groups = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.254405] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rdp.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.254719] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] rdp.html5_proxy_base_url = http://127.0.0.1:6083/ {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.254900] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.255080] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.255250] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.image_metadata_prefilter = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.255413] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.255580] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.max_attempts = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.255743] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.max_placement_results = 1000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.255908] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.256084] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.query_placement_for_image_type_support = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.256250] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.256461] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] scheduler.workers = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.256598] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
535.256770] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.256948] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.257133] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.257302] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.257465] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.257627] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.257817] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.257986] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.host_subset_size = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.258169] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.258330] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.image_properties_default_architecture = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.258496] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.258660] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.isolated_hosts = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.258823] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.isolated_images = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.258985] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.max_instances_per_host = 50 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.259158] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.259320] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.259509] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.pci_in_placement = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.259693] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.259860] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.260040] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.260207] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.260371] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.260534] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.260696] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.track_instance_changes = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.260872] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
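This dump, and everything that follows through the privsep sections, is produced by oslo.config itself: at service start-up, oslo_service calls ConfigOpts.log_opt_values(), which walks every registered option group and logs one "group.name = value" record at DEBUG (the cfg.py:2609 call site cited in each entry). A minimal sketch of that mechanism, with illustrative option definitions rather than Nova's real ones:

    # Minimal sketch of the oslo.config mechanism behind this dump; the
    # option names/defaults below are illustrative, not Nova's definitions.
    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF = cfg.CONF
    CONF.register_opts(
        [
            cfg.ListOpt('enabled_filters', default=['ComputeFilter']),
            cfg.IntOpt('host_subset_size', default=1),
            cfg.StrOpt('example_secret', secret=True),  # rendered as ****
        ],
        group='filter_scheduler',
    )

    if __name__ == '__main__':
        CONF([])  # parse an empty argv so defaults apply
        CONF.log_opt_values(LOG, logging.DEBUG)  # one "group.name = value" per opt

Options declared with secret=True are the ones that show up masked as **** in this dump (vmware.host_password above, the notification transport_url further down).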
[ 535.261052] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metrics.required = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.261220] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metrics.weight_multiplier = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.261383] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metrics.weight_of_unavailable = -10000.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.261547] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] metrics.weight_setting = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.261835] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.262013] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] serial_console.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.262195] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] serial_console.port_range = 10000:20000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.262404] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.262595] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.262766] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] serial_console.serialproxy_port = 6083 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.262934] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.263123] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.auth_type = password {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.263306] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.263487] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.263654] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.263818] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.263978] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.264177] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.send_service_user_token = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.264344] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.264506] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] service_user.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
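The [service_user] block above (auth_type = password, send_service_user_token = True) is consumed through keystoneauth1's config loading. A minimal sketch, assuming python-keystoneauth1; Nova's real wiring lives in nova.service_auth, and this trimmed version only mirrors the option group logged here:

    from keystoneauth1 import loading as ks_loading
    from oslo_config import cfg

    CONF = cfg.CONF
    SERVICE_USER_GROUP = 'service_user'

    # Register the standard keystoneauth auth/session options on the group.
    ks_loading.register_auth_conf_options(CONF, SERVICE_USER_GROUP)
    ks_loading.register_session_conf_options(CONF, SERVICE_USER_GROUP)

    def get_service_auth(conf=CONF):
        # Builds the 'password' auth plugin from the [service_user] values;
        # the resulting token rides along with user requests when
        # send_service_user_token = True.
        return ks_loading.load_auth_from_conf_options(conf, SERVICE_USER_GROUP)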
[ 535.264678] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.agent_enabled = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.264840] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.265141] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.265351] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.html5proxy_host = 0.0.0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.265559] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.html5proxy_port = 6082 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.265730] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.image_compression = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.265893] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.jpeg_compression = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.266065] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.playback_compression = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.266243] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.server_listen = 127.0.0.1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.266414] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.266576] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.streaming_mode = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.266791] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] spice.zlib_compression = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.267088] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] upgrade_levels.baseapi = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.267324] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] upgrade_levels.cert = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.267515] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] upgrade_levels.compute = auto {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.267683] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] upgrade_levels.conductor = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.267845] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] upgrade_levels.scheduler = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.268024] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.268203] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.auth_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.268403] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.268592] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.268785] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.268962] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.269140] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.269333] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.269519] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vendordata_dynamic_auth.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.269695] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.api_retry_count = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.269858] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.ca_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.270042] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.cache_prefix = devstack-image-cache {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.270237] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.cluster_name = testcl1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.270413] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.connection_pool_size = 10 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.270593] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.console_delay_seconds = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.270766] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.datastore_regex = ^datastore.* {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.270967] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.271159] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.host_password = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.271327] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.host_port = 443 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.271499] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.host_username = administrator@vsphere.local {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.271668] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.insecure = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.271852] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.integration_bridge = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.272157] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.maximum_objects = 100 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.272443] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.pbm_default_policy = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.272654] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.pbm_enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.272824] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.pbm_wsdl_location = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.272997] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.273176] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.serial_port_proxy_uri = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.273338] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.serial_port_service_uri = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.273552] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.task_poll_interval = 0.5 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.273735] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.use_linked_clone = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.273908] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.vnc_keymap = en-us {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.274087] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.vnc_port = 5900 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.274257] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vmware.vnc_port_total = 10000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
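The [vmware] values above are what the VMwareVCDriver hands to oslo.vmware when it opens the vCenter session logged earlier. A minimal sketch, assuming the keyword names of oslo_vmware.api.VMwareAPISession; verify them against the installed release, and note the constructor connects to the host immediately by default:

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        host='vc1.osci.c.eu-de-1.cloud.sap',            # vmware.host_ip
        port=443,                                       # vmware.host_port
        server_username='administrator@vsphere.local',  # vmware.host_username
        server_password='***',                          # masked as **** above
        api_retry_count=10,                             # vmware.api_retry_count
        task_poll_interval=0.5,                         # vmware.task_poll_interval
        insecure=True,                                  # vmware.insecure: skip TLS verification
    )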
[ 535.274461] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.auth_schemes = ['none'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.274662] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.274950] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.275152] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.275323] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.novncproxy_port = 6080 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.275530] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.server_listen = 127.0.0.1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.275730] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.275894] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.vencrypt_ca_certs = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.276064] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.vencrypt_client_cert = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.276225] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vnc.vencrypt_client_key = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.276399] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.276564] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.disable_deep_image_inspection = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.276727] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.disable_fallback_pcpu_query = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.276885] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.disable_group_policy_check_upcall = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.277057] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.277224] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.disable_rootwrap = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.277386] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.enable_numa_live_migration = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.277547] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.277707] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.277870] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.handle_virt_lifecycle_events = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.278042] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.libvirt_disable_apic = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.278210] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.never_download_image_if_on_rbd = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.278374] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.278557] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.278736] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.278900] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.279075] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.279240] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.279402] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.279566] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.279731] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.279916] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.280098] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.client_socket_timeout = 900 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.280272] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.default_pool_size = 1000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.280441] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.keep_alive = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.280606] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.max_header_line = 16384 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.280769] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.secure_proxy_ssl_header = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.280930] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.ssl_ca_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.281101] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.ssl_cert_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.281265] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.ssl_key_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.281429] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.tcp_keepidle = 600 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.281627] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
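The wsgi.wsgi_log_format value above is an ordinary %-style template that the WSGI layer renders against a dict of per-request fields. A quick illustration with made-up request values:

    fmt = ('%(client_ip)s "%(request_line)s" status: %(status_code)s '
           'len: %(body_length)s time: %(wall_seconds).7f')

    print(fmt % {
        'client_ip': '127.0.0.1',
        'request_line': 'GET /v2.1/servers HTTP/1.1',
        'status_code': 200,
        'body_length': 1365,
        'wall_seconds': 0.0421337,
    })
    # 127.0.0.1 "GET /v2.1/servers HTTP/1.1" status: 200 len: 1365 time: 0.0421337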
[ 535.281805] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] zvm.ca_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.281969] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] zvm.cloud_connector_url = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.282264] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.282464] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] zvm.reachable_timeout = 300 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.282655] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.enforce_new_defaults = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.282831] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.enforce_scope = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.283022] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.policy_default_rule = default {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.283208] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.283408] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.policy_file = policy.yaml {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.283586] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.283748] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.283909] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.284081] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.284247] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.284419] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.284608] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.284793] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.connection_string = messaging:// {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.284963] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.enabled = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.285147] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.es_doc_type = notification {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.285312] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.es_scroll_size = 10000 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.285481] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.es_scroll_time = 2m {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.285643] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.filter_error_trace = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.285807] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.hmac_keys = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.285972] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.sentinel_service_name = mymaster {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.286151] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.socket_timeout = 0.1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.286313] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.trace_requests = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.286489] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler.trace_sqlalchemy = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
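With oslo_policy.enforce_new_defaults and enforce_scope both True (as logged above), the policy Enforcer loads policy.yaml plus the policy.d/ directory and applies the new secure-RBAC defaults with token-scope checking. A minimal sketch, assuming oslo.policy; the rule name, default and target are illustrative, not Nova's:

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    enforcer = policy.Enforcer(CONF)  # honors the [oslo_policy] options above
    enforcer.register_default(policy.RuleDefault('compute:example', 'role:admin'))

    def allowed(creds, target):
        # Returns False on failure; pass do_raise=True to raise instead.
        return enforcer.enforce('compute:example', target, creds)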
[ 535.286672] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler_jaeger.process_tags = {} {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.286837] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler_jaeger.service_name_prefix = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.286999] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] profiler_otlp.service_name_prefix = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.287180] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] remote_debug.host = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.287341] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] remote_debug.port = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.287523] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.287690] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.287855] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.288027] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.288197] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.288358] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.288521] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.288682] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.heartbeat_rate = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.288843] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.289006] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.289185] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.289354] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.289525] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.289690] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.289853] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.290328] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.290391] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.290570] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.290770] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.290911] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.291088] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.291262] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.291427] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.291592] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.291761] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.291992] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.ssl = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.292110] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.292306] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.292656] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.292854] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.293043] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_rabbit.ssl_version = {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.293243] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.293439] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_notifications.retry = -1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.293634] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.293812] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_messaging_notifications.transport_url = **** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
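The [oslo_messaging_notifications] values above (driver = ['messagingv2'], topics = ['notifications'], masked transport_url) correspond roughly to the following sketch, assuming oslo.messaging; publisher_id and the event are illustrative, and actually emitting requires a reachable broker:

    import oslo_messaging
    from oslo_config import cfg

    CONF = cfg.CONF
    transport = oslo_messaging.get_notification_transport(CONF)
    notifier = oslo_messaging.Notifier(
        transport,
        publisher_id='compute.devstack',
        driver='messagingv2',
        topics=['notifications'],
    )
    # Emits a versioned notification onto the 'notifications' topic.
    notifier.info({}, 'compute.instance.create.end', {'uuid': '...'})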
[ 535.293986] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.auth_section = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.294169] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.auth_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.294329] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.cafile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.294500] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.certfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.294666] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.collect_timing = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.294822] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.connect_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.294982] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.connect_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.295157] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.endpoint_id = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.295317] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.endpoint_override = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.295478] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.insecure = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.295635] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.keyfile = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.295790] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.max_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.295943] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.min_version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.296114] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.region_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.296273] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.service_name = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.296428] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.service_type = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.296650] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.split_loggers = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.296832] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.status_code_retries = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.296990] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.status_code_retry_delay = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.297166] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.timeout = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.297326] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.valid_interfaces = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.297511] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_limit.version = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.297688] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_reports.file_event_handler = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.297855] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_reports.file_event_handler_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.298022] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] oslo_reports.log_dir = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
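The [oslo_reports] options above configure the Guru Meditation Report; with file_event_handler unset, the report stays signal-triggered (SIGUSR2 on POSIX). Services arm it once at start-up, roughly like this sketch, assuming oslo.reports; a real service passes its version module (e.g. nova.version), and the stand-in class here only keeps the sketch self-contained:

    from oslo_config import cfg
    from oslo_reports import guru_meditation_report as gmr
    from oslo_reports import opts as gmr_opts

    class _Version:  # stand-in for a real version module such as nova.version
        def vendor_string(self):
            return 'example'
        def product_string(self):
            return 'example-service'
        def version_string_with_package(self):
            return '0.0.1'

    CONF = cfg.CONF
    gmr_opts.set_defaults(CONF)
    gmr.TextGuruMeditation.setup_autorun(_Version(), conf=CONF)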
[ 535.298198] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.298361] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_linux_bridge_privileged.group = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.298521] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.298686] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.298848] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299021] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_linux_bridge_privileged.user = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299189] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299348] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_ovs_privileged.group = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299506] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_ovs_privileged.helper_command = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299666] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299827] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.299980] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] vif_plug_ovs_privileged.user = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
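The privsep capability lists above are Linux capability numbers: 1 = CAP_DAC_OVERRIDE, 12 = CAP_NET_ADMIN, 21 = CAP_SYS_ADMIN. A minimal sketch, assuming oslo.privsep, of how os-vif declares such a context; the [vif_plug_ovs_privileged] section logged above then supplies user/group/helper_command to the spawned privileged daemon:

    from oslo_privsep import capabilities as caps
    from oslo_privsep import priv_context

    vif_plug = priv_context.PrivContext(
        'vif_plug_ovs',
        cfg_section='vif_plug_ovs_privileged',
        pypath=__name__ + '.vif_plug',
        capabilities=[caps.CAP_NET_ADMIN, caps.CAP_DAC_OVERRIDE],  # logged as [12, 1]
    )

    @vif_plug.entrypoint
    def plug_port(bridge, port):
        # The body runs inside the privsep daemon with only the caps above.
        pass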
env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.301252] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.301417] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_linux_bridge.vlan_interface = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.301599] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.301769] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.isolate_vif = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.301937] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.302116] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.302311] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.302492] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.ovsdb_interface = native {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.302657] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_vif_ovs.per_port_bridge = False {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.302825] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_brick.lock_path = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.302989] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.303165] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] os_brick.wait_mpath_device_interval = 1 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.303353] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] privsep_osbrick.capabilities = [21] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 535.303525] 
env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] privsep_osbrick.group = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.303685] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] privsep_osbrick.helper_command = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.303850] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304021] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] privsep_osbrick.thread_pool_size = 8 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304187] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] privsep_osbrick.user = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304358] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304516] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] nova_sys_admin.group = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304673] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] nova_sys_admin.helper_command = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304834] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.304993] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] nova_sys_admin.thread_pool_size = 8 {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.305162] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] nova_sys_admin.user = None {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}}
[ 535.305294] env[67008]: DEBUG oslo_service.service [None req-11b91188-c433-4009-ba26-e37ed347ad5f None None] ******************************************************************************** {{(pid=67008) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 535.305718] env[67008]: INFO nova.service [-] Starting compute node (version 0.0.1)
[ 535.318314] env[67008]: WARNING nova.virt.vmwareapi.driver [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 535.318743] env[67008]: INFO nova.virt.node [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Generated node identity ad100a41-192a-4a03-bdd9-0a78ce856705
[ 535.318962] env[67008]: INFO nova.virt.node [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Wrote node identity ad100a41-192a-4a03-bdd9-0a78ce856705 to /opt/stack/data/n-cpu-1/compute_id
[ 535.334772] env[67008]: WARNING nova.compute.manager [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Compute nodes ['ad100a41-192a-4a03-bdd9-0a78ce856705'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 535.385036] env[67008]: INFO nova.compute.manager [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 535.418499] env[67008]: WARNING nova.compute.manager [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
[ 535.418733] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 535.418944] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 535.419107] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 535.419267] env[67008]: DEBUG nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 535.420468] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dc0f94-d8fd-4c73-bcb6-2b9d0b69d132 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.428981] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92469628-f045-48ef-b9b0-21617b31e25b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.444090] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3e8adf-9b96-4c7c-a588-6929df98c97f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 535.449301] env[67008]: DEBUG oslo_vmware.service [-]
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917f342e-a3cf-4ddf-acb7-99355d1820d6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.478987] env[67008]: DEBUG nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181096MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 535.479156] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 535.479341] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 535.491023] env[67008]: WARNING nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] No compute node record for cpu-1:ad100a41-192a-4a03-bdd9-0a78ce856705: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host ad100a41-192a-4a03-bdd9-0a78ce856705 could not be found. [ 535.505837] env[67008]: INFO nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: ad100a41-192a-4a03-bdd9-0a78ce856705 [ 535.570146] env[67008]: DEBUG nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 535.570361] env[67008]: DEBUG nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 535.716083] env[67008]: INFO nova.scheduler.client.report [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] [req-92720847-af0a-4239-b722-86ee97e8ab62] Created resource provider record via placement API for resource provider with UUID ad100a41-192a-4a03-bdd9-0a78ce856705 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 535.737766] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baa3b8e-da19-4cc8-8b1b-dfd323e4deb3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.745244] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3568d034-3949-4972-be94-36c238c16d0b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.775414] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d712766-9e88-4ae5-a14a-fdedf8a845c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.782650] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b8e39f-d8b5-4f7d-a926-867f752f1836 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.796716] env[67008]: DEBUG nova.compute.provider_tree [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 535.850430] env[67008]: DEBUG nova.scheduler.client.report [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Updated inventory for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 535.850667] env[67008]: DEBUG nova.compute.provider_tree [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Updating resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 generation from 0 to 1 during operation: update_inventory {{(pid=67008) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 535.850809] env[67008]: DEBUG nova.compute.provider_tree [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 535.944509] env[67008]: DEBUG nova.compute.provider_tree [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Updating 
resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 generation from 1 to 2 during operation: update_traits {{(pid=67008) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 535.963309] env[67008]: DEBUG nova.compute.resource_tracker [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 535.963522] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.484s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 535.963689] env[67008]: DEBUG nova.service [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Creating RPC server for service compute {{(pid=67008) start /opt/stack/nova/nova/service.py:182}}
[ 535.976702] env[67008]: DEBUG nova.service [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] Join ServiceGroup membership for this service compute {{(pid=67008) start /opt/stack/nova/nova/service.py:199}}
[ 535.976702] env[67008]: DEBUG nova.servicegroup.drivers.db [None req-0a343aa5-a78f-47f3-8cca-15b209124624 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=67008) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 545.142471] env[67008]: DEBUG dbcounter [-] [67008] Writing DB stats nova_cell0:SELECT=1 {{(pid=67008) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 545.143293] env[67008]: DEBUG dbcounter [-] [67008] Writing DB stats nova_cell1:SELECT=1 {{(pid=67008) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 559.979100] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 559.989589] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Getting list of instances from cluster (obj){
[ 559.989589] env[67008]: value = "domain-c8"
[ 559.989589] env[67008]: _type = "ClusterComputeResource"
[ 559.989589] env[67008]: } {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 559.990690] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9b3b9a-c320-45f4-b346-ac452529f567 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.000052] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Got total of 0 instances {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 560.000277] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 560.000589] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Getting list of instances from cluster (obj){
[ 560.000589] env[67008]: value = "domain-c8"
[ 560.000589] env[67008]: _type = "ClusterComputeResource"
[ 560.000589] env[67008]: } {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 560.001434] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57176a59-f9f0-4973-97ec-5371bef394cb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 560.009130] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Got total of 0 instances {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 579.506915] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "668ce119-c0ee-4996-ae4a-bbe0a788cab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 579.507233] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "668ce119-c0ee-4996-ae4a-bbe0a788cab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 579.546283] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Starting instance...
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 579.715401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 579.715742] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 579.717706] env[67008]: INFO nova.compute.claims [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.916054] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3d61d0-bb7c-4725-b2f9-598826504851 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.925760] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d196ba4-0aae-4a9f-ad16-a50e3c6b976b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.968299] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0a5d98-3664-4eb4-b742-2627352e4b3f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.978618] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d829717-2d49-49ba-a754-5297281da12a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.997726] env[67008]: DEBUG nova.compute.provider_tree [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.013216] env[67008]: DEBUG nova.scheduler.client.report [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 580.033448] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 
tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 580.034472] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 580.082276] env[67008]: DEBUG nova.compute.utils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.087559] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 580.087853] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 580.115026] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 580.226519] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 580.896637] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 580.897317] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 580.897317] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 580.897553] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 580.897701] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 580.897850] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 580.898366] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 580.898366] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 580.899687] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] 
Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 580.899687] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 580.899687] env[67008]: DEBUG nova.virt.hardware [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 580.899893] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037a8f9d-58b4-408a-a2f8-ec6d8b9f2ad4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.909256] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25ebd49-4de1-44f8-8115-b059a8cfb731 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.928200] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb23dfe-3953-43af-9c07-947ada511b12 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 581.065615] env[67008]: DEBUG nova.policy [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5343dcbda10b4898b07cfc371ea9e355', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2ffa090d43c4facaec9fcb96575a5f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}}
[ 582.694767] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquiring lock "8724bca5-56d0-4e6e-a178-4f3634d37007" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 582.694767] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Lock "8724bca5-56d0-4e6e-a178-4f3634d37007" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 582.713525] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Starting instance...
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 582.811658] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 582.811727] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 582.813222] env[67008]: INFO nova.compute.claims [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.862432] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Successfully created port: 0186ad22-e31f-4b81-b86c-40720ace171d {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.931916] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6897cc-1e67-44e0-bc60-41db441b3509 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.942521] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef5684d-a973-46a7-bbcd-b08d5d85c8b4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.983689] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b768497e-16ce-4060-ba63-10126a589222 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.993032] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdd0981-e680-43f5-a7d0-de7d5ef914f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.012239] env[67008]: DEBUG nova.compute.provider_tree [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.027527] env[67008]: DEBUG nova.scheduler.client.report [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 583.047973] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.236s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 583.048567] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 583.103595] env[67008]: DEBUG nova.compute.utils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 583.105859] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 583.106255] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 583.127121] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 583.226637] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 583.265698] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.266048] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.266178] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.266379] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.266557] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.266777] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.266977] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.267396] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.267396] env[67008]: DEBUG nova.virt.hardware [None 
req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 583.267948] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 583.267948] env[67008]: DEBUG nova.virt.hardware [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 583.271580] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e8bb19-e202-4f53-906e-f92b925a4e40 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 583.281925] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b15558-5a4d-44d5-b4a4-5a21439a971c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 583.395205] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquiring lock "e5e1de51-40a9-4b43-b885-87501738dc96" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 583.395885] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Lock "e5e1de51-40a9-4b43-b885-87501738dc96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 583.412970] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Starting instance...
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 583.449234] env[67008]: DEBUG nova.policy [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4326cf0e92d14874b7c7b0d81f26a5a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce910e35b650464ebaa1008f0ad7115b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 583.512246] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 583.512851] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 583.515379] env[67008]: INFO nova.compute.claims [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.652900] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8cdf76-8c0e-4116-a72c-386dcb23937f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.661836] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9ec212-1606-4756-8a8e-2228106de356 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.697614] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e705b45f-03a7-4714-aacd-88c52e6515e5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.710161] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058a7861-79bb-419e-8b88-812048280ab4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.720185] env[67008]: DEBUG nova.compute.provider_tree [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.732584] env[67008]: DEBUG nova.scheduler.client.report [None 
req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 583.753703] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 583.754232] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 583.814472] env[67008]: DEBUG nova.compute.utils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 583.818431] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 583.818518] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 583.836097] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 583.926964] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 583.953972] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.954244] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.954405] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.954583] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.954837] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.954878] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.955108] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.955247] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.955414] 
env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.955571] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 583.955736] env[67008]: DEBUG nova.virt.hardware [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.956627] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581218eb-cc7a-4474-b343-b0267b5f2e57 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.965982] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadb6705-5b81-49b8-8b38-579af0b0831c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.581669] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Successfully created port: 74188fdf-ba50-4f35-984c-bb871165158d {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.588577] env[67008]: DEBUG nova.policy [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ee7759798654c2593d1df04b4800412', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0b6942a8c3247f3977b80df40c0e6fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 586.504354] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquiring lock "cc257ece-4b3e-45c8-a1a7-69330848ad89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 586.508090] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Lock "cc257ece-4b3e-45c8-a1a7-69330848ad89" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 586.525459] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 586.602153] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 586.602680] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 586.608120] env[67008]: INFO nova.compute.claims [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.753952] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5b55b1-cff0-4955-8c78-4db07e5cdb9d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.766021] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a26de79-5f0f-45e7-9582-b2c7aa3b62e3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.798209] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79b4048-dda0-4b4b-afc7-9437f4348567 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.806640] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52f3774-83ce-4ca5-9987-772d7a6def7b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.825563] env[67008]: DEBUG nova.compute.provider_tree [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.836953] env[67008]: DEBUG nova.scheduler.client.report [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Inventory has not changed for 
provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 586.862035] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 586.862597] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 586.910168] env[67008]: DEBUG nova.compute.utils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 586.911990] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 586.911990] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 586.934169] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 587.017975] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 587.050678] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.050900] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.051106] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.051292] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.051429] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.051565] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.051762] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.051913] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.052092] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.052253] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.052413] env[67008]: DEBUG nova.virt.hardware [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.053378] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccab51e1-2953-4fc6-b9f8-c7a2be439018 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.065011] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Successfully updated port: 74188fdf-ba50-4f35-984c-bb871165158d {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 587.069392] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62389e23-74b9-4de3-b0fc-a72fa10766b1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.085598] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquiring lock "refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 587.085732] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquired lock "refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 587.085930] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.296025] env[67008]: DEBUG nova.policy [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'228364d01b8a440ba7916f7dadf2b7a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df12ae2c0f76449db929276dd7aefe19', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 587.328225] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.004521] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Successfully created port: 9c24d4f0-daca-4026-9fbb-241ec9bdf182 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.315920] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Updating instance_info_cache with network_info: [{"id": "74188fdf-ba50-4f35-984c-bb871165158d", "address": "fa:16:3e:18:6c:67", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74188fdf-ba", "ovs_interfaceid": "74188fdf-ba50-4f35-984c-bb871165158d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.331160] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Releasing lock "refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 588.332084] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Instance network_info: |[{"id": "74188fdf-ba50-4f35-984c-bb871165158d", "address": "fa:16:3e:18:6c:67", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": 
"shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74188fdf-ba", "ovs_interfaceid": "74188fdf-ba50-4f35-984c-bb871165158d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 588.333134] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:6c:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74188fdf-ba50-4f35-984c-bb871165158d', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 588.348547] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.349292] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59de3885-8c73-4790-b6ca-510253a26564 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.361877] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Created folder: OpenStack in parent group-v4. [ 588.362120] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Creating folder: Project (ce910e35b650464ebaa1008f0ad7115b). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.362669] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2ba7f96-4fb2-4546-80e9-b8ea770ac591 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.373155] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Created folder: Project (ce910e35b650464ebaa1008f0ad7115b) in parent group-v567993. 
[ 588.373155] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Creating folder: Instances. Parent ref: group-v567994. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.373155] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d44a3a03-dcec-40ac-9116-4a2aeeb460d9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.382264] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Created folder: Instances in parent group-v567994. [ 588.382529] env[67008]: DEBUG oslo.service.loopingcall [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.382714] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 588.382923] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-591f702a-b032-4da1-ada3-e46f9b9c71de {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.404732] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 588.404732] env[67008]: value = "task-2824825" [ 588.404732] env[67008]: _type = "Task" [ 588.404732] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.409648] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Successfully updated port: 0186ad22-e31f-4b81-b86c-40720ace171d {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.417226] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824825, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.434016] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "refresh_cache-668ce119-c0ee-4996-ae4a-bbe0a788cab5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 588.434016] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "refresh_cache-668ce119-c0ee-4996-ae4a-bbe0a788cab5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 588.434016] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.668036] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.924194] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824825, 'name': CreateVM_Task, 'duration_secs': 0.323302} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.924194] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.941890] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Successfully created port: 2344c7d0-075d-4290-928a-d632c97819c5 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 588.945750] env[67008]: DEBUG oslo_vmware.service [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799dadff-d3ed-4b4c-b8f8-3cc1bd793db3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.954487] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 588.954487] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 588.955692] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 588.955968] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8158646-df3f-4432-a1d2-c852378f1de3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.960793] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Waiting for the task: (returnval){ [ 588.960793] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5294d3cb-c349-bbab-7a8f-dc63224a4ef8" [ 588.960793] env[67008]: _type = "Task" [ 588.960793] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.971918] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5294d3cb-c349-bbab-7a8f-dc63224a4ef8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.037707] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.038342] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.051410] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 589.119022] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.119022] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.119022] env[67008]: INFO nova.compute.claims [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.305138] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facb9fee-4edb-4a24-af67-820d39576eee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.316897] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5985e6e4-4b3f-4734-a349-3e66a0232e49 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.357711] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cef050-d739-4f9e-9d64-2509a0669027 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.366738] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2a820d-ebb9-4871-9f18-81095cb0e46a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.382976] env[67008]: DEBUG nova.compute.provider_tree [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.396993] env[67008]: DEBUG nova.scheduler.client.report [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
589.424265] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 589.424265] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 589.475798] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 589.476383] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.476952] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 589.477365] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 589.479088] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.479088] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6094ec4-4cac-4a2b-ac67-6d0d80c096c0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.482309] env[67008]: DEBUG nova.compute.utils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.484485] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a 
tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 589.484763] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.502566] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.502718] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 589.503277] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 589.510236] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668a68de-a65f-4a70-a744-b9d72742e18b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.518204] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aec78a9b-4373-4e6e-a396-17ebcbc0b1ee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.523638] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Waiting for the task: (returnval){ [ 589.523638] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]529a0062-2403-668e-2fc9-7290cefa4417" [ 589.523638] env[67008]: _type = "Task" [ 589.523638] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.533168] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]529a0062-2403-668e-2fc9-7290cefa4417, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.593802] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 589.626180] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 589.626449] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 589.626605] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.626783] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 589.626926] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.627136] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 589.627292] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 589.627448] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 589.627613] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 589.627769] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 589.627936] env[67008]: DEBUG nova.virt.hardware [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 589.629117] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51f33f3-ace9-4a9f-97c9-55a1ac5f3d15 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.637637] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d61be2-e1c9-47e8-9350-3ea826b66638 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.820568] env[67008]: DEBUG nova.compute.manager [req-c1ac41bb-f0fc-4ce0-933c-e4118e92b9ef req-af51d771-d9f0-48e7-89fb-d84be15fb609 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Received event network-vif-plugged-74188fdf-ba50-4f35-984c-bb871165158d {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 589.820568] env[67008]: DEBUG oslo_concurrency.lockutils [req-c1ac41bb-f0fc-4ce0-933c-e4118e92b9ef req-af51d771-d9f0-48e7-89fb-d84be15fb609 service nova] Acquiring lock "8724bca5-56d0-4e6e-a178-4f3634d37007-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 589.820568] env[67008]: DEBUG oslo_concurrency.lockutils [req-c1ac41bb-f0fc-4ce0-933c-e4118e92b9ef req-af51d771-d9f0-48e7-89fb-d84be15fb609 service nova] Lock "8724bca5-56d0-4e6e-a178-4f3634d37007-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 589.820568] env[67008]: DEBUG oslo_concurrency.lockutils [req-c1ac41bb-f0fc-4ce0-933c-e4118e92b9ef req-af51d771-d9f0-48e7-89fb-d84be15fb609 service nova] Lock "8724bca5-56d0-4e6e-a178-4f3634d37007-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 589.820799] env[67008]: DEBUG nova.compute.manager [req-c1ac41bb-f0fc-4ce0-933c-e4118e92b9ef req-af51d771-d9f0-48e7-89fb-d84be15fb609 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] No waiting events found dispatching network-vif-plugged-74188fdf-ba50-4f35-984c-bb871165158d {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 589.820799] env[67008]: WARNING nova.compute.manager [req-c1ac41bb-f0fc-4ce0-933c-e4118e92b9ef req-af51d771-d9f0-48e7-89fb-d84be15fb609 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Received unexpected event network-vif-plugged-74188fdf-ba50-4f35-984c-bb871165158d for instance with vm_state building and task_state spawning. [ 589.857010] env[67008]: DEBUG nova.policy [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ddbc6f5b8a84cdc894ef15dff60da57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd1d53a5ba5d44a969626b1dc7f15c4d0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 589.865925] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Updating instance_info_cache with network_info: [{"id": "0186ad22-e31f-4b81-b86c-40720ace171d", "address": "fa:16:3e:74:8b:9e", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0186ad22-e3", "ovs_interfaceid": "0186ad22-e31f-4b81-b86c-40720ace171d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.883087] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "refresh_cache-668ce119-c0ee-4996-ae4a-bbe0a788cab5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 589.883438] env[67008]: DEBUG nova.compute.manager 
[None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Instance network_info: |[{"id": "0186ad22-e31f-4b81-b86c-40720ace171d", "address": "fa:16:3e:74:8b:9e", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0186ad22-e3", "ovs_interfaceid": "0186ad22-e31f-4b81-b86c-40720ace171d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 589.883815] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:8b:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0186ad22-e31f-4b81-b86c-40720ace171d', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.891769] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating folder: Project (d2ffa090d43c4facaec9fcb96575a5f6). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.896428] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-554838c8-b6d4-4a25-93c3-e481f57df2ec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.908889] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created folder: Project (d2ffa090d43c4facaec9fcb96575a5f6) in parent group-v567993. [ 589.909113] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating folder: Instances. Parent ref: group-v567997. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 589.909942] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89f1097e-4ecf-44d2-833a-1a8f989c9866 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.920128] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created folder: Instances in parent group-v567997. [ 589.920128] env[67008]: DEBUG oslo.service.loopingcall [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.920128] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 589.920128] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0658d7d3-f4a1-414e-89df-a0d6aa54581b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.942650] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.942650] env[67008]: value = "task-2824828" [ 589.942650] env[67008]: _type = "Task" [ 589.942650] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.953333] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824828, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.037164] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 590.040830] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Creating directory with path [datastore1] vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 590.040830] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7922b735-e605-413a-b298-c1c5ee1c009e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.061910] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Created directory with path [datastore1] vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 590.061910] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Fetch image to [datastore1] vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 590.062164] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 590.062876] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f8946d-4abe-4ccc-aee0-262487b767b5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.072106] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f3bd89-632d-46dd-93a7-2f742e63fa0c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.081288] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba837753-916c-4f48-b542-d57658667add {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.119281] env[67008]: DEBUG nova.compute.manager [req-4df27db1-dd82-4032-a9d4-66284b19c4da req-d5e5b507-902d-46fe-b14b-f32f3e9086bb service nova] [instance: 
668ce119-c0ee-4996-ae4a-bbe0a788cab5] Received event network-vif-plugged-0186ad22-e31f-4b81-b86c-40720ace171d {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 590.119517] env[67008]: DEBUG oslo_concurrency.lockutils [req-4df27db1-dd82-4032-a9d4-66284b19c4da req-d5e5b507-902d-46fe-b14b-f32f3e9086bb service nova] Acquiring lock "668ce119-c0ee-4996-ae4a-bbe0a788cab5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 590.119743] env[67008]: DEBUG oslo_concurrency.lockutils [req-4df27db1-dd82-4032-a9d4-66284b19c4da req-d5e5b507-902d-46fe-b14b-f32f3e9086bb service nova] Lock "668ce119-c0ee-4996-ae4a-bbe0a788cab5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 590.119926] env[67008]: DEBUG oslo_concurrency.lockutils [req-4df27db1-dd82-4032-a9d4-66284b19c4da req-d5e5b507-902d-46fe-b14b-f32f3e9086bb service nova] Lock "668ce119-c0ee-4996-ae4a-bbe0a788cab5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 590.120115] env[67008]: DEBUG nova.compute.manager [req-4df27db1-dd82-4032-a9d4-66284b19c4da req-d5e5b507-902d-46fe-b14b-f32f3e9086bb service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] No waiting events found dispatching network-vif-plugged-0186ad22-e31f-4b81-b86c-40720ace171d {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 590.120282] env[67008]: WARNING nova.compute.manager [req-4df27db1-dd82-4032-a9d4-66284b19c4da req-d5e5b507-902d-46fe-b14b-f32f3e9086bb service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Received unexpected event network-vif-plugged-0186ad22-e31f-4b81-b86c-40720ace171d for instance with vm_state building and task_state spawning. 
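The two WARNING lines above are the visible half of Nova's external-event handshake: Neutron posts network-vif-plugged, the compute manager takes the per-instance "<uuid>-events" lock, looks for a registered waiter, finds none (the spawn path had not prepared one yet), and logs the event as unexpected. A minimal sketch of that waiter pattern, with illustrative names rather than Nova's real classes:

    import threading

    class InstanceEventWaiter:
        """Illustrative stand-in for the per-instance event registry."""

        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._waiters = {}              # event name -> threading.Event

        def prepare(self, name):
            ev = threading.Event()
            with self._lock:
                self._waiters[name] = ev
            return ev

        def pop(self, name):
            with self._lock:
                return self._waiters.pop(name, None)

    w = InstanceEventWaiter()
    ev = w.prepare('network-vif-plugged-74188fdf')
    pending = w.pop('network-vif-plugged-74188fdf')
    if pending:
        pending.set()                       # wakes whoever blocks on ev.wait(timeout)
    # An event arriving with no waiter prepared takes the WARNING path above:
    print(w.pop('network-vif-plugged-unknown'))   # -> None, i.e. "unexpected event"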
[ 590.121166] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb41caa-8b1f-403a-8c40-6e2e08a99118 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.131689] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6658c0cd-d1db-438e-83e7-5941bb0bdf9d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.154074] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 590.237195] env[67008]: DEBUG oslo_vmware.rw_handles [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 590.308129] env[67008]: DEBUG oslo_vmware.rw_handles [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 590.308339] env[67008]: DEBUG oslo_vmware.rw_handles [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 590.455466] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824828, 'name': CreateVM_Task, 'duration_secs': 0.35688} completed successfully. 
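The Glance-to-datastore copy above is a chunked HTTP PUT into the datastore's /folder namespace (21,318,656 bytes here), authorized via the generic service ticket acquired just before it. oslo.vmware wraps this in its rw_handles write handle; a rough equivalent with requests, with the ticket/cookie auth omitted and the temp path elided as in the log, would be:

    import requests

    # Datastore file URL as logged; dcPath/dsName select the target datastore.
    url = ('https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/'
           'vmware_temp/.../tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1')

    with open('tmp-sparse.vmdk', 'rb') as src:   # stand-in for the Glance image iterator
        resp = requests.put(url, data=src,       # streamed from the file object
                            verify=False)        # lab setup; verify certs in production
    resp.raise_for_status()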
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.455932] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 590.456778] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 590.456957] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 590.457316] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 590.457650] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-212ee4af-6339-4e7f-8ea6-d6ea747d1c0e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.465098] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 590.465098] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52824b06-da36-ec98-b2cc-ab8a45f21903" [ 590.465098] env[67008]: _type = "Task" [ 590.465098] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.476551] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52824b06-da36-ec98-b2cc-ab8a45f21903, 'name': SearchDatastore_Task} progress is 0%. 
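Every "Waiting for the task: (returnval){...}" block followed by "progress is N%" lines is the same mechanism: the SOAP call returns a Task managed-object reference immediately, and oslo.vmware polls it until it reaches a terminal state. A hand-rolled equivalent of that loop (get_task_info is a stand-in for the real property read):

    import time

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        """Poll a vSphere Task reference until it reaches a terminal state."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)     # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
            time.sleep(interval)              # each pass emits a "progress is N%" line
        raise TimeoutError('task %s did not complete in %ss' % (task_id, timeout))

    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'success', 'result': 'vm-123'}])
    print(wait_for_task(lambda t: next(states), 'task-2824828', interval=0))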
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.977366] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 590.978525] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.978525] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 591.735046] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Successfully created port: e92057e4-be60-4137-85c2-1ae9ba0fb041 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.868203] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.869972] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.869972] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 591.869972] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 591.894999] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 591.895227] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Skipping network cache update for instance because it is Building. 
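The "[datastore1] devstack-image-cache_base/<image-id>" lock plus the "external semaphore" acquired and released above serialize image handling: concurrent spawns of the same image block until one of them has populated the cache. With oslo.concurrency the shape is roughly this (the cache-check helpers are stand-ins, not Nova code):

    from oslo_concurrency import lockutils

    CACHE_KEY = ('[datastore1] devstack-image-cache_base/'
                 'ae01aa56-93e6-47e6-accd-8c8a802d92bd')

    def image_in_cache(key):        # stand-in for the SearchDatastore_Task check
        return False

    def fetch_image(key):           # stand-in for the HTTP upload shown earlier
        print('fetching', key)

    # external=True adds a file-based lock so other processes on the host are
    # excluded too, not just greenthreads inside this nova-compute.
    with lockutils.lock(CACHE_KEY, external=True, lock_path='/tmp'):
        if not image_in_cache(CACHE_KEY):
            fetch_image(CACHE_KEY)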
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 591.895361] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 591.895485] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 591.895603] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 591.895721] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 591.896681] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.896967] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.897210] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.897420] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.897605] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.897812] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.897987] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
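The burst of "Running periodic task ComputeManager._..." lines is oslo.service's periodic-task runner walking every decorated method on the manager; each task may bail out early, as _reclaim_queued_deletes does here when its interval is unset. A reduced sketch with the real decorator (CONF wiring simplified):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):

        @periodic_task.periodic_task          # default spacing: run on every pass
        def _reclaim_queued_deletes(self, context):
            reclaim_interval = 0              # mirrors CONF.reclaim_instance_interval here
            if reclaim_interval <= 0:
                print('CONF.reclaim_instance_interval <= 0, skipping...')
                return

    Manager(cfg.CONF).run_periodic_tasks(context=None)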
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 591.898152] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 591.922609] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 591.922911] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 591.924991] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 591.925834] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 591.925834] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 591.925834] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 591.927041] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c382768-fd72-4988-b169-8d0e124eb476 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.939217] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111f0322-1bd5-4b59-a942-465c44b4ac22 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.944961] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 591.960648] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a05960d-e165-4b0e-a074-d5c790213923 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.970124] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58580d72-708f-4930-8a75-c2d064098251 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.002728] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181093MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 592.002940] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 592.003199] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 592.026688] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 592.065451] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 668ce119-c0ee-4996-ae4a-bbe0a788cab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 592.065616] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8724bca5-56d0-4e6e-a178-4f3634d37007 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 592.065752] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5e1de51-40a9-4b43-b885-87501738dc96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
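The resource tracker enumerates five "actively managed" instances in this audit (the remaining entries follow below), each holding 128 MB / 1 vCPU / 1 GB; the "Total usable vcpus" and "Final resource view" lines a little further down are just that arithmetic stacked on the 512 MB host reservation:

    instances = 5                                    # the five actively managed entries
    per_instance = {'MEMORY_MB': 128, 'VCPU': 1, 'DISK_GB': 1}
    reserved_mb = 512

    used_ram = reserved_mb + instances * per_instance['MEMORY_MB']
    used_vcpus = instances * per_instance['VCPU']
    used_disk = instances * per_instance['DISK_GB']
    print(used_ram, used_vcpus, used_disk)           # 1152 5 5, matching the log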
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 592.065875] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cc257ece-4b3e-45c8-a1a7-69330848ad89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 592.065991] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 592.098362] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 592.098585] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 592.098736] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 592.104304] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Successfully updated port: 9c24d4f0-daca-4026-9fbb-241ec9bdf182 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.124888] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquiring lock "refresh_cache-e5e1de51-40a9-4b43-b885-87501738dc96" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 592.124993] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquired lock "refresh_cache-e5e1de51-40a9-4b43-b885-87501738dc96" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 592.125149] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Building network info cache for instance {{(pid=67008) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.223588] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508c0a7a-f317-4d13-9f35-9921a9573859 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.235826] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de9c811-1f1c-4a4e-80db-a7507ddbfe49 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.270466] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51968a5a-a4de-4047-8c7e-7933f699014d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.279865] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2a4094-8606-4207-b6dc-06f45852a929 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.298827] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.307695] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.312298] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 592.342446] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 592.342446] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.338s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 592.342446] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.315s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 592.343717] env[67008]: INFO nova.compute.claims [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.417114] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Successfully updated port: 2344c7d0-075d-4290-928a-d632c97819c5 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.432598] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquiring lock "refresh_cache-cc257ece-4b3e-45c8-a1a7-69330848ad89" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 592.433967] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquired lock "refresh_cache-cc257ece-4b3e-45c8-a1a7-69330848ad89" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 592.433967] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 592.578987] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7d1a82-7887-46d8-8062-7b74503b1d68 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.590234] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8228fbe6-9996-4dd3-8bea-8ecd769e84e5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.631584] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Instance cache missing network info. 
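The inventory posted for provider ad100a41 above also fixes what the scheduler can place here: placement's usable capacity per resource class is (total - reserved) * allocation_ratio, which is why 48 physical vCPUs back 192 schedulable ones at the 4.0 ratio:

    inventory = {                                    # values from the provider above
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)       # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0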
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.634169] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51b2660-4d6f-442e-a13c-07e996741b23 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.646460] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b18562-1a32-4a3f-a331-71e191c17e17 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.663067] env[67008]: DEBUG nova.compute.provider_tree [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.676406] env[67008]: DEBUG nova.scheduler.client.report [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 592.691633] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.350s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 592.694269] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 592.771264] env[67008]: DEBUG nova.compute.utils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 592.774526] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Allocating IP information in the background. 
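"Allocating IP information in the background" means the Neutron port creation runs concurrently with image prep and block-device assembly, and the spawn path joins the result before wiring the VM. Nova does this with eventlet greenthreads; the same pattern with a portable thread pool (allocate_for_instance is a stand-in) looks like:

    from concurrent.futures import ThreadPoolExecutor

    def allocate_for_instance():                     # stand-in for the Neutron calls
        return [{'id': '0186ad22-...', 'address': 'fa:16:3e:74:8b:9e'}]

    with ThreadPoolExecutor(max_workers=1) as pool:
        nw_future = pool.submit(allocate_for_instance)
        # ... meanwhile: fetch the image, build block device mappings ...
        network_info = nw_future.result(timeout=300)  # join before building the VM
        print(network_info)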
{{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 592.774526] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 592.789271] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 592.867116] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 592.895348] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 592.895348] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 592.895348] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.895563] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 592.895563] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 592.895563] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 592.895563] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 592.895787] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 592.895861] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 592.896680] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 592.896954] env[67008]: DEBUG nova.virt.hardware [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 592.897933] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6451d001-6767-4b49-beb9-026e92865dea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.906392] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442c3e4e-385b-4bbb-8776-0407677deb1b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.319776] env[67008]: DEBUG nova.policy [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85031ce221f64527aae7e9ec69f6c437', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2825a573ba104b97a5b863d29e7830ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 
593.489893] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Updating instance_info_cache with network_info: [{"id": "2344c7d0-075d-4290-928a-d632c97819c5", "address": "fa:16:3e:3e:7d:ad", "network": {"id": "31a4bdb4-5261-44fb-b91c-e091333e60e3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1194020491-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df12ae2c0f76449db929276dd7aefe19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2344c7d0-07", "ovs_interfaceid": "2344c7d0-075d-4290-928a-d632c97819c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.516999] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Releasing lock "refresh_cache-cc257ece-4b3e-45c8-a1a7-69330848ad89" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 593.517323] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Instance network_info: |[{"id": "2344c7d0-075d-4290-928a-d632c97819c5", "address": "fa:16:3e:3e:7d:ad", "network": {"id": "31a4bdb4-5261-44fb-b91c-e091333e60e3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1194020491-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df12ae2c0f76449db929276dd7aefe19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2344c7d0-07", "ovs_interfaceid": "2344c7d0-075d-4290-928a-d632c97819c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 593.517706] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:7d:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2344c7d0-075d-4290-928a-d632c97819c5', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.529933] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Creating folder: Project (df12ae2c0f76449db929276dd7aefe19). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 593.531137] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Updating instance_info_cache with network_info: [{"id": "9c24d4f0-daca-4026-9fbb-241ec9bdf182", "address": "fa:16:3e:24:fd:3d", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c24d4f0-da", "ovs_interfaceid": "9c24d4f0-daca-4026-9fbb-241ec9bdf182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.537018] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa1027ce-179b-41b5-b84c-bf5ce5cf6675 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.549428] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Created folder: Project (df12ae2c0f76449db929276dd7aefe19) in parent group-v567993. [ 593.549634] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Creating folder: Instances. Parent ref: group-v568000. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 593.550182] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58b2f266-77c5-4dca-aa05-4281d7d0c348 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.555853] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Releasing lock "refresh_cache-e5e1de51-40a9-4b43-b885-87501738dc96" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 593.556151] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Instance network_info: |[{"id": "9c24d4f0-daca-4026-9fbb-241ec9bdf182", "address": "fa:16:3e:24:fd:3d", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c24d4f0-da", "ovs_interfaceid": "9c24d4f0-daca-4026-9fbb-241ec9bdf182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 593.556518] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:fd:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c24d4f0-daca-4026-9fbb-241ec9bdf182', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.565491] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Creating folder: Project (c0b6942a8c3247f3977b80df40c0e6fa). Parent ref: group-v567993. 
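Each "Instance VIF info" entry above is the Neutron port boiled down to the three things vSphere needs: a vmxnet3 NIC model, the br-int integration-bridge name, and an opaque-network backing matched by the NSX logical-switch id. An illustrative reduction, with field names following the log rather than a public API:

    def to_vif_info(vif):
        return {
            'network_name': vif['network']['bridge'],               # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }

    vif = {'id': '9c24d4f0-daca-4026-9fbb-241ec9bdf182',
           'address': 'fa:16:3e:24:fd:3d',
           'network': {'bridge': 'br-int'},
           'details': {'nsx-logical-switch-id': '3739ba33-c119-432c-9aee-80a62864317d'}}
    print(to_vif_info(vif))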
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 593.567523] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e29d62d-b4ff-4bac-a98a-3f44826e5daa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.572412] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Created folder: Instances in parent group-v568000. [ 593.572640] env[67008]: DEBUG oslo.service.loopingcall [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.573156] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 593.573753] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81059109-d643-4dd0-88ff-78be03f16660 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.593286] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Created folder: Project (c0b6942a8c3247f3977b80df40c0e6fa) in parent group-v567993. [ 593.593502] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Creating folder: Instances. Parent ref: group-v568002. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 593.595195] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75968bed-1052-4679-98da-f70915738667 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.605755] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.605755] env[67008]: value = "task-2824833" [ 593.605755] env[67008]: _type = "Task" [ 593.605755] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.607823] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Created folder: Instances in parent group-v568002. [ 593.608346] env[67008]: DEBUG oslo.service.loopingcall [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
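"Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" comes from oslo.service's loopingcall module, which wraps the blocking create so the worker yields between checks instead of stalling the process. The module's FixedIntervalLoopingCall shows the underlying pattern; whether create_vm is driven exactly this way is an inference from the logged path, so treat this as a sketch of the mechanism only:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def poll():
        attempts['n'] += 1
        if attempts['n'] >= 3:                       # pretend the VM creation finished
            raise loopingcall.LoopingCallDone(retvalue='vm-ref')

    timer = loopingcall.FixedIntervalLoopingCall(poll)
    print(timer.start(interval=0.1).wait())          # -> 'vm-ref'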
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.611671] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 593.611891] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d11ee2e7-9bbb-471d-84a7-58ea8981c093 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.634540] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824833, 'name': CreateVM_Task} progress is 6%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.635907] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.635907] env[67008]: value = "task-2824834" [ 593.635907] env[67008]: _type = "Task" [ 593.635907] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.124996] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824833, 'name': CreateVM_Task, 'duration_secs': 0.392277} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.125763] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 594.126884] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 594.127346] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 594.127911] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 594.128402] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79066e07-ff1f-47cb-8289-3d9ac408f0ee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.137548] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Waiting for the task: (returnval){ [ 594.137548] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52b31adf-90bc-3f3c-d8f5-554242a2a0b9" [ 594.137548] env[67008]: _type = "Task" [ 
594.137548] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.161596] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824834, 'name': CreateVM_Task, 'duration_secs': 0.406125} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.162965] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 594.162965] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 594.162965] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 594.163133] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 594.164150] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 594.164364] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 594.166470] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 594.166841] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4bf9a3b-73ec-4117-a8ba-791f3a3ffe73 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.172972] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d 
tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Waiting for the task: (returnval){ [ 594.172972] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]523975e0-888c-efe5-3dcb-9ef825a9d9d0" [ 594.172972] env[67008]: _type = "Task" [ 594.172972] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.184338] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]523975e0-888c-efe5-3dcb-9ef825a9d9d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.687086] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 594.687907] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 594.687907] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 594.835690] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Successfully updated port: e92057e4-be60-4137-85c2-1ae9ba0fb041 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 594.837188] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "5472df57-f2bc-4a90-9251-13760f932d77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 594.837401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "5472df57-f2bc-4a90-9251-13760f932d77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 594.851989] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "refresh_cache-24f99c22-49e9-486a-a2d7-a02a8da3f6d3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 594.852130] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquired lock "refresh_cache-24f99c22-49e9-486a-a2d7-a02a8da3f6d3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 594.852262] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 594.878308] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 594.931106] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.960960] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 594.960960] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 594.962444] env[67008]: INFO nova.compute.claims [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.151548] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa6a6e5-98cb-4b67-8479-52548b488659 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.163577] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08889232-41e6-4b7c-b411-5353993668e6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.199462] 
env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9a308a-029a-4303-94fa-b876f8b6bd40 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.211664] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838f9070-6fc3-4d00-b2ad-28972e2f3409 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.231947] env[67008]: DEBUG nova.compute.provider_tree [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.242884] env[67008]: DEBUG nova.scheduler.client.report [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 595.250026] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Updating instance_info_cache with network_info: [{"id": "e92057e4-be60-4137-85c2-1ae9ba0fb041", "address": "fa:16:3e:de:9e:21", "network": {"id": "af516538-57d4-477b-af26-c3c3fead6bcc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-136840625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d53a5ba5d44a969626b1dc7f15c4d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape92057e4-be", "ovs_interfaceid": "e92057e4-be60-4137-85c2-1ae9ba0fb041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.260220] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Releasing lock "refresh_cache-24f99c22-49e9-486a-a2d7-a02a8da3f6d3" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 595.260685] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance network_info: |[{"id": "e92057e4-be60-4137-85c2-1ae9ba0fb041", "address": "fa:16:3e:de:9e:21", "network": {"id": "af516538-57d4-477b-af26-c3c3fead6bcc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-136840625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d53a5ba5d44a969626b1dc7f15c4d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape92057e4-be", "ovs_interfaceid": "e92057e4-be60-4137-85c2-1ae9ba0fb041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 595.261036] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:9e:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c842425c-544e-4ce2-9657-512723bd318e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e92057e4-be60-4137-85c2-1ae9ba0fb041', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.269933] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Creating folder: Project (d1d53a5ba5d44a969626b1dc7f15c4d0). Parent ref: group-v567993. 
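The folder dance above (a "Project (<tenant id>)" folder under the tenant root group-v567993, then an "Instances" folder inside it) is plain Folder.CreateFolder calls issued through the oslo.vmware session. A rough sketch under the assumption of an established VMwareAPISession and a parent-folder moref (both placeholders; error handling omitted):

    def ensure_project_folders(session, parent_ref, project_id):
        # Sketch only. CreateFolder raises a DuplicateName fault when a
        # concurrent build created the folder first; real nova treats that
        # as success and looks up the existing folder instead.
        project_folder = session.invoke_api(
            session.vim, "CreateFolder", parent_ref,
            name="Project (%s)" % project_id)
        return session.invoke_api(
            session.vim, "CreateFolder", project_folder, name="Instances")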
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 595.271683] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Successfully created port: 4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.273828] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b554c221-e370-4c29-abcf-3a07e56393e8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.276155] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.315s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 595.276659] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 595.291497] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Created folder: Project (d1d53a5ba5d44a969626b1dc7f15c4d0) in parent group-v567993. [ 595.291792] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Creating folder: Instances. Parent ref: group-v568006. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 595.291988] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75da0dd6-327c-412b-a0aa-e4ab2ab4143c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.302370] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Created folder: Instances in parent group-v568006. [ 595.302983] env[67008]: DEBUG oslo.service.loopingcall [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
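The task entries around here ("Waiting for the task: ... task-2824833 ... progress is 6%" through "completed successfully") are oslo.vmware's task-polling loop: the driver kicks off an asynchronous vCenter task and blocks on it. A sketch, with the session and the folder/spec/pool morefs as placeholders:

    def create_vm(session, folder_ref, config_spec, res_pool_ref):
        # CreateVM_Task returns immediately with a task moref; wait_for_task
        # polls its state (the '_poll_task ... progress is N%' lines above)
        # and returns the completed task info, raising if the task errors.
        task_ref = session.invoke_api(
            session.vim, "CreateVM_Task", folder_ref,
            config=config_spec, pool=res_pool_ref)
        return session.wait_for_task(task_ref)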
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.303303] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 595.303458] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d7b042f-8479-47f2-bf89-48a357cbb863 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.328040] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.328040] env[67008]: value = "task-2824837" [ 595.328040] env[67008]: _type = "Task" [ 595.328040] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.338744] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824837, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.351931] env[67008]: DEBUG nova.compute.utils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 595.354210] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 595.354413] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 595.378826] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 595.490448] env[67008]: DEBUG nova.policy [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a44308a3d8d9496d907a64a1f923abd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '385f8341f9d34af38bedb3457b646875', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 595.497678] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Start spawning the instance on the hypervisor. 
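The nova.virt.hardware entries that follow walk the m1.nano flavor (1 vCPU, no hw: topology limits in flavor or image) through CPU-topology selection; with no constraints the only candidate is sockets=1, cores=1, threads=1. A toy recomputation of that result (heavily simplified from nova/virt/hardware.py):

    def possible_topologies(vcpus):
        # Enumerate (sockets, cores, threads) triples whose product is the
        # vCPU count, mirroring 'Build topologies for 1 vcpu(s) 1:1:1' ->
        # 'Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]'.
        return [(s, c, t)
                for s in range(1, vcpus + 1)
                for c in range(1, vcpus + 1)
                for t in range(1, vcpus + 1)
                if s * c * t == vcpus]

    assert possible_topologies(1) == [(1, 1, 1)]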
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 595.526894] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.527150] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.527310] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.527489] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.527635] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.527778] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.527983] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.528497] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 595.528856] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 
tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.529501] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.529704] env[67008]: DEBUG nova.virt.hardware [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.530623] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd77f50a-c4fd-4135-81aa-8153a770dfb5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.545172] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675e3b13-3fd9-4ddf-b582-679ebc12bc47 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.837070] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824837, 'name': CreateVM_Task, 'duration_secs': 0.355159} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.837438] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 595.838150] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 595.838319] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 595.838628] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 595.838871] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66437ab4-9d2a-4006-b9a0-2ede47e27165 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.843442] env[67008]: DEBUG oslo_vmware.api [None 
req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Waiting for the task: (returnval){ [ 595.843442] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52544806-b9fd-be56-69af-9488466fde08" [ 595.843442] env[67008]: _type = "Task" [ 595.843442] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.852231] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52544806-b9fd-be56-69af-9488466fde08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.030549] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Successfully created port: d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 596.353887] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 596.354821] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.354821] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 596.898052] env[67008]: DEBUG nova.compute.manager [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Received event network-changed-74188fdf-ba50-4f35-984c-bb871165158d {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 596.898125] env[67008]: DEBUG nova.compute.manager [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Refreshing instance network info cache due to event network-changed-74188fdf-ba50-4f35-984c-bb871165158d. 
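The Acquiring/Acquired/Releasing triples that recur through this section (the "[datastore1] devstack-image-cache_base/..." image-cache locks above, the "refresh_cache-<uuid>" locks below) come from oslo.concurrency's named-lock helper: same-named locks serialize with each other within the process. A minimal sketch of the pattern (lock name copied from the log; the guarded work is a placeholder):

    from oslo_concurrency import lockutils

    def refresh_instance_network_cache():
        pass  # placeholder for the work done while the lock is held

    # Entry and exit of this context manager produce exactly the
    # 'Acquiring'/'Acquired'/'Releasing' DEBUG lines seen above.
    with lockutils.lock("refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007"):
        refresh_instance_network_cache()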
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 596.898507] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] Acquiring lock "refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 596.902657] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] Acquired lock "refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 596.902657] env[67008]: DEBUG nova.network.neutron [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Refreshing network info cache for port 74188fdf-ba50-4f35-984c-bb871165158d {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.068720] env[67008]: DEBUG nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Received event network-changed-0186ad22-e31f-4b81-b86c-40720ace171d {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 597.068906] env[67008]: DEBUG nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Refreshing instance network info cache due to event network-changed-0186ad22-e31f-4b81-b86c-40720ace171d. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 597.069155] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Acquiring lock "refresh_cache-668ce119-c0ee-4996-ae4a-bbe0a788cab5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 597.071525] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Acquired lock "refresh_cache-668ce119-c0ee-4996-ae4a-bbe0a788cab5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 597.071525] env[67008]: DEBUG nova.network.neutron [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Refreshing network info cache for port 0186ad22-e31f-4b81-b86c-40720ace171d {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.968104] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Successfully updated port: d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 597.984478] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "refresh_cache-5472df57-f2bc-4a90-9251-13760f932d77" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 597.984638] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired lock "refresh_cache-5472df57-f2bc-4a90-9251-13760f932d77" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 597.984826] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.057769] env[67008]: DEBUG nova.network.neutron [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Updated VIF entry in instance network info cache for port 74188fdf-ba50-4f35-984c-bb871165158d. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 598.058464] env[67008]: DEBUG nova.network.neutron [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Updating instance_info_cache with network_info: [{"id": "74188fdf-ba50-4f35-984c-bb871165158d", "address": "fa:16:3e:18:6c:67", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74188fdf-ba", "ovs_interfaceid": "74188fdf-ba50-4f35-984c-bb871165158d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.076628] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] Releasing lock "refresh_cache-8724bca5-56d0-4e6e-a178-4f3634d37007" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 598.076891] env[67008]: DEBUG nova.compute.manager [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Received event network-vif-plugged-2344c7d0-075d-4290-928a-d632c97819c5 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 598.077096] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] Acquiring lock 
"cc257ece-4b3e-45c8-a1a7-69330848ad89-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 598.077301] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] Lock "cc257ece-4b3e-45c8-a1a7-69330848ad89-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 598.077502] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] Lock "cc257ece-4b3e-45c8-a1a7-69330848ad89-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 598.077616] env[67008]: DEBUG nova.compute.manager [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] No waiting events found dispatching network-vif-plugged-2344c7d0-075d-4290-928a-d632c97819c5 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 598.077781] env[67008]: WARNING nova.compute.manager [req-d1117512-c8aa-42c1-b9e6-70f9e26bd178 req-3d4e5810-2213-4871-bd6b-70fd3bc1d608 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Received unexpected event network-vif-plugged-2344c7d0-075d-4290-928a-d632c97819c5 for instance with vm_state building and task_state spawning. [ 598.099568] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.153561] env[67008]: DEBUG nova.network.neutron [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Updated VIF entry in instance network info cache for port 0186ad22-e31f-4b81-b86c-40720ace171d. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 598.153926] env[67008]: DEBUG nova.network.neutron [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Updating instance_info_cache with network_info: [{"id": "0186ad22-e31f-4b81-b86c-40720ace171d", "address": "fa:16:3e:74:8b:9e", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0186ad22-e3", "ovs_interfaceid": "0186ad22-e31f-4b81-b86c-40720ace171d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.168344] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Releasing lock "refresh_cache-668ce119-c0ee-4996-ae4a-bbe0a788cab5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 598.168624] env[67008]: DEBUG nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Received event network-vif-plugged-9c24d4f0-daca-4026-9fbb-241ec9bdf182 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 598.168815] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Acquiring lock "e5e1de51-40a9-4b43-b885-87501738dc96-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 598.169054] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Lock "e5e1de51-40a9-4b43-b885-87501738dc96-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 598.170396] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Lock "e5e1de51-40a9-4b43-b885-87501738dc96-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 598.170574] env[67008]: DEBUG 
nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] No waiting events found dispatching network-vif-plugged-9c24d4f0-daca-4026-9fbb-241ec9bdf182 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 598.170742] env[67008]: WARNING nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Received unexpected event network-vif-plugged-9c24d4f0-daca-4026-9fbb-241ec9bdf182 for instance with vm_state building and task_state spawning. [ 598.170905] env[67008]: DEBUG nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Received event network-changed-9c24d4f0-daca-4026-9fbb-241ec9bdf182 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 598.171075] env[67008]: DEBUG nova.compute.manager [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Refreshing instance network info cache due to event network-changed-9c24d4f0-daca-4026-9fbb-241ec9bdf182. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 598.171291] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Acquiring lock "refresh_cache-e5e1de51-40a9-4b43-b885-87501738dc96" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 598.171448] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Acquired lock "refresh_cache-e5e1de51-40a9-4b43-b885-87501738dc96" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 598.171724] env[67008]: DEBUG nova.network.neutron [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Refreshing network info cache for port 9c24d4f0-daca-4026-9fbb-241ec9bdf182 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 598.264392] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Successfully updated port: 4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 598.278124] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "refresh_cache-3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 598.278124] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquired lock "refresh_cache-3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 598.278124] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.360540] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.814658] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Updating instance_info_cache with network_info: [{"id": "4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68", "address": "fa:16:3e:74:a5:f6", "network": {"id": "5220ac50-1124-42b6-8b71-5e4af46186a7", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-690748831-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2825a573ba104b97a5b863d29e7830ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec3b2d1-66", "ovs_interfaceid": "4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.826952] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Releasing lock "refresh_cache-3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 598.827515] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance network_info: |[{"id": "4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68", "address": "fa:16:3e:74:a5:f6", "network": {"id": "5220ac50-1124-42b6-8b71-5e4af46186a7", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-690748831-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2825a573ba104b97a5b863d29e7830ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec3b2d1-66", "ovs_interfaceid": "4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 598.827660] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:a5:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.838655] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Creating folder: Project (2825a573ba104b97a5b863d29e7830ce). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.839901] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-532e6efa-820d-4d0c-a5d3-b66bb4644b69 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.854674] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Created folder: Project (2825a573ba104b97a5b863d29e7830ce) in parent group-v567993. [ 598.854905] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Creating folder: Instances. Parent ref: group-v568009. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.855379] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1fc8127f-72c8-4e26-b36d-7dd1ee7494c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.860465] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Updating instance_info_cache with network_info: [{"id": "d955ae4c-b4ec-4ea0-8cab-bc4eb898d951", "address": "fa:16:3e:19:b8:91", "network": {"id": "1a7c14af-11ce-40dd-92f8-ac1a08161d50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1304694986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385f8341f9d34af38bedb3457b646875", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd955ae4c-b4", "ovs_interfaceid": "d955ae4c-b4ec-4ea0-8cab-bc4eb898d951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.864992] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Created folder: Instances in parent group-v568009. [ 598.866017] env[67008]: DEBUG oslo.service.loopingcall [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.866017] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 598.866017] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2472ad2-6f72-44ac-b5c6-9d44e724e61f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.882804] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Releasing lock "refresh_cache-5472df57-f2bc-4a90-9251-13760f932d77" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 598.883105] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Instance network_info: |[{"id": "d955ae4c-b4ec-4ea0-8cab-bc4eb898d951", "address": "fa:16:3e:19:b8:91", "network": {"id": "1a7c14af-11ce-40dd-92f8-ac1a08161d50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1304694986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385f8341f9d34af38bedb3457b646875", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd955ae4c-b4", "ovs_interfaceid": "d955ae4c-b4ec-4ea0-8cab-bc4eb898d951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 598.883904] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:b8:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd955ae4c-b4ec-4ea0-8cab-bc4eb898d951', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.891444] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating folder: Project (385f8341f9d34af38bedb3457b646875). Parent ref: group-v567993. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.893951] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01514400-f383-4a0a-ab5f-369741e105a1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.895471] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.895471] env[67008]: value = "task-2824840" [ 598.895471] env[67008]: _type = "Task" [ 598.895471] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.907162] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824840, 'name': CreateVM_Task} progress is 6%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.908010] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Created folder: Project (385f8341f9d34af38bedb3457b646875) in parent group-v567993. [ 598.909208] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating folder: Instances. Parent ref: group-v568011. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.909208] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a6f5a82-7428-4e19-b8df-7b8318e4c87d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.922091] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Created folder: Instances in parent group-v568011. [ 598.922626] env[67008]: DEBUG oslo.service.loopingcall [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.924027] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 598.924027] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db4d63b8-efd4-4897-bf13-971196ee69c4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.947886] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.947886] env[67008]: value = "task-2824843" [ 598.947886] env[67008]: _type = "Task" [ 598.947886] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.955956] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824843, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.051133] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 599.051548] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 599.063324] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 599.142058] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 599.142058] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 599.143606] env[67008]: INFO nova.compute.claims [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.208967] env[67008]: DEBUG nova.network.neutron [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Updated VIF entry in instance network info cache for port 9c24d4f0-daca-4026-9fbb-241ec9bdf182. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 599.208967] env[67008]: DEBUG nova.network.neutron [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Updating instance_info_cache with network_info: [{"id": "9c24d4f0-daca-4026-9fbb-241ec9bdf182", "address": "fa:16:3e:24:fd:3d", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.114", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c24d4f0-da", "ovs_interfaceid": "9c24d4f0-daca-4026-9fbb-241ec9bdf182", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.228683] env[67008]: DEBUG oslo_concurrency.lockutils [req-476f7b8e-8c57-4fd6-8461-3e4b8c995d4a req-b0ee2f45-bf08-4e0a-bfcb-5485afa8d00e service nova] Releasing lock "refresh_cache-e5e1de51-40a9-4b43-b885-87501738dc96" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 599.410438] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824840, 'name': CreateVM_Task, 'duration_secs': 0.384673} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.410734] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.411618] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 599.411702] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 599.412010] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 599.412334] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4fc5cb8-7009-4e74-8ea6-ae56e864f837 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.417775] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Waiting for the task: (returnval){ [ 599.417775] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52485c52-56ea-f877-a8de-f944ef3e92c2" [ 599.417775] env[67008]: _type = "Task" [ 599.417775] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.433565] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52485c52-56ea-f877-a8de-f944ef3e92c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.435275] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179d9362-cb43-4e0e-bfb2-7953c75a7697 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.445920] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143217cc-74b9-418b-b1ab-8062213f29c0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.458660] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824843, 'name': CreateVM_Task, 'duration_secs': 0.386705} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.487969] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.488904] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 599.489726] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb67402-229a-40b2-9ab9-8377582c91ef {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.499307] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f952206-160e-4dc4-bba0-9be87547cf6b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.519770] env[67008]: DEBUG nova.compute.provider_tree [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.534209] env[67008]: DEBUG nova.scheduler.client.report [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 599.559242] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.418s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 599.560570] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 599.608023] env[67008]: DEBUG nova.compute.utils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 599.612534] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 599.615883] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 599.622286] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 599.703024] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 599.738259] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 599.740143] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 599.741289] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 599.741416] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 599.741557] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 599.742092] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 599.742092] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 599.742200] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 599.742337] env[67008]: DEBUG 
nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 599.742506] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 599.742674] env[67008]: DEBUG nova.virt.hardware [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 599.743937] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fb902e-6c5c-40fd-95ad-de0e0fe16889 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.759969] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0e10b3-a072-4687-bd89-48a740edd249 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.814317] env[67008]: DEBUG nova.policy [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '089599a607e74999be4a753c7ca800b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ff5b8556262462b989083d70debb060', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 599.932676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 599.932676] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 599.932676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 599.932676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 599.932916] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 599.933024] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d8aa976-00ad-49f7-a750-aaf300708e8f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.938816] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 599.938816] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5241385b-b7d1-5e7b-ccad-3dbf28acc04a" [ 599.938816] env[67008]: _type = "Task" [ 599.938816] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.948957] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5241385b-b7d1-5e7b-ccad-3dbf28acc04a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.373525] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "72dc7fb5-e94e-4784-9864-a1731ea7c755" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 600.373934] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 600.388952] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 600.451467] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 600.451735] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.451939] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 600.494764] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 600.495026] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 600.496695] env[67008]: INFO nova.compute.claims [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.539917] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 600.541774] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 600.562474] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 
tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 600.632911] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Successfully created port: 0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.658422] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 600.788731] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38516b2-57cc-4770-aef6-9c8e74831a54 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.796755] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340594dc-3651-4d60-a142-065d64f00ca5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.834638] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd3bb3b-d469-4b3e-9d39-117d830c61d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.843030] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ec38b3-6eef-4ccb-96f2-3723eb0998cc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.860284] env[67008]: DEBUG nova.compute.provider_tree [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.871642] env[67008]: DEBUG nova.scheduler.client.report [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 600.900296] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.405s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 600.900794] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 600.905357] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.248s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 600.909111] env[67008]: INFO nova.compute.claims [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.960229] env[67008]: DEBUG nova.compute.utils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.961548] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 600.961714] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.501786] env[67008]: DEBUG nova.policy [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a44308a3d8d9496d907a64a1f923abd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '385f8341f9d34af38bedb3457b646875', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 601.503656] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Start building block device mappings for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 601.631559] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 601.679283] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.679283] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.679283] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.679478] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.679604] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.679711] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.679908] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.680067] env[67008]: DEBUG nova.virt.hardware 
[None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.680222] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.680376] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.680536] env[67008]: DEBUG nova.virt.hardware [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.681744] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1aace12-b418-4db8-9294-dc07e82dd086 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.701596] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6583162a-8bf1-4fdf-b983-c83e4698db7c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.740354] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "40a26f4e-0be9-4770-83a7-31c87dbf921f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 601.740354] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 601.741408] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecc4f3c-ff39-441f-b750-d381ea9159d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.749523] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edb8de4-92a1-4c76-92b7-00e2526a994e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.784625] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9adcf44-9377-4fd3-ba02-aca1c29590b0 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.793032] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8db0149-8fec-4120-acf6-99c559b306fb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.806905] env[67008]: DEBUG nova.compute.provider_tree [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.816166] env[67008]: DEBUG nova.scheduler.client.report [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 601.835061] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.930s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 601.835550] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 601.890134] env[67008]: DEBUG nova.compute.utils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.891566] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Allocating IP information in the background. 
{{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 601.891566] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.905506] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 602.006747] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 602.043457] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 602.043710] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 602.043860] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.044427] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 602.044675] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 602.044834] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 602.045059] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 602.045225] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 602.045390] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 602.045550] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 602.045750] env[67008]: DEBUG nova.virt.hardware [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.046938] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b66bc72-1a2c-4d84-ab67-e7dc856cd801 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.059989] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aef15a4-fdd9-4a99-ad1d-3aea010cb6e6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.135205] env[67008]: DEBUG nova.policy [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4429d8841771463f896a57600994844e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f58ceaf700d4a49b5e9680e344c808f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 602.148957] 
env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Received event network-changed-2344c7d0-075d-4290-928a-d632c97819c5 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 602.148957] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Refreshing instance network info cache due to event network-changed-2344c7d0-075d-4290-928a-d632c97819c5. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 602.148957] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquiring lock "refresh_cache-cc257ece-4b3e-45c8-a1a7-69330848ad89" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 602.148957] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquired lock "refresh_cache-cc257ece-4b3e-45c8-a1a7-69330848ad89" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 602.149151] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Refreshing network info cache for port 2344c7d0-075d-4290-928a-d632c97819c5 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.173655] env[67008]: DEBUG nova.compute.manager [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Received event network-vif-plugged-4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 602.173823] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] Acquiring lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 602.173940] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 602.174129] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 602.174298] env[67008]: DEBUG nova.compute.manager [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] No waiting events found dispatching
network-vif-plugged-4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 602.178167] env[67008]: WARNING nova.compute.manager [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Received unexpected event network-vif-plugged-4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 for instance with vm_state building and task_state spawning. [ 602.181602] env[67008]: DEBUG nova.compute.manager [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Received event network-changed-4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 602.181859] env[67008]: DEBUG nova.compute.manager [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Refreshing instance network info cache due to event network-changed-4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 602.182086] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] Acquiring lock "refresh_cache-3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 602.182231] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] Acquired lock "refresh_cache-3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 602.182393] env[67008]: DEBUG nova.network.neutron [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Refreshing network info cache for port 4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.544024] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Successfully created port: f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.069193] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Successfully updated port: 0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.084228] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "refresh_cache-b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 603.084412] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 
tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquired lock "refresh_cache-b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 603.084567] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.362339] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.718781] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Updated VIF entry in instance network info cache for port 2344c7d0-075d-4290-928a-d632c97819c5. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 603.718781] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Updating instance_info_cache with network_info: [{"id": "2344c7d0-075d-4290-928a-d632c97819c5", "address": "fa:16:3e:3e:7d:ad", "network": {"id": "31a4bdb4-5261-44fb-b91c-e091333e60e3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1194020491-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df12ae2c0f76449db929276dd7aefe19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2344c7d0-07", "ovs_interfaceid": "2344c7d0-075d-4290-928a-d632c97819c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.727649] env[67008]: DEBUG nova.network.neutron [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Updated VIF entry in instance network info cache for port 4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 603.727649] env[67008]: DEBUG nova.network.neutron [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Updating instance_info_cache with network_info: [{"id": "4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68", "address": "fa:16:3e:74:a5:f6", "network": {"id": "5220ac50-1124-42b6-8b71-5e4af46186a7", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-690748831-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2825a573ba104b97a5b863d29e7830ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec3b2d1-66", "ovs_interfaceid": "4ec3b2d1-664e-4ddf-b5e4-2bb46678fb68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.738116] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Releasing lock "refresh_cache-cc257ece-4b3e-45c8-a1a7-69330848ad89" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 603.738116] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Received event network-vif-plugged-e92057e4-be60-4137-85c2-1ae9ba0fb041 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 603.738116] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquiring lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 603.738272] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 603.738422] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 603.738583] env[67008]: DEBUG
nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] No waiting events found dispatching network-vif-plugged-e92057e4-be60-4137-85c2-1ae9ba0fb041 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 603.738738] env[67008]: WARNING nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Received unexpected event network-vif-plugged-e92057e4-be60-4137-85c2-1ae9ba0fb041 for instance with vm_state building and task_state spawning. [ 603.738891] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Received event network-changed-e92057e4-be60-4137-85c2-1ae9ba0fb041 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 603.739046] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Refreshing instance network info cache due to event network-changed-e92057e4-be60-4137-85c2-1ae9ba0fb041. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 603.739219] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquiring lock "refresh_cache-24f99c22-49e9-486a-a2d7-a02a8da3f6d3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 603.739343] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquired lock "refresh_cache-24f99c22-49e9-486a-a2d7-a02a8da3f6d3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 603.739486] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Refreshing network info cache for port e92057e4-be60-4137-85c2-1ae9ba0fb041 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 603.751306] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7e6533c-5c82-4464-89c7-35787922ae39 req-197d68a3-b4ca-46ac-8ec0-65ac93121df2 service nova] Releasing lock "refresh_cache-3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 603.928524] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Updating instance_info_cache with network_info: [{"id": "0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1", "address": "fa:16:3e:40:1b:9a", "network": {"id": "4c61661a-e3e2-44de-a107-0452067106df", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-143967869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ff5b8556262462b989083d70debb060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a0a6549-a5", "ovs_interfaceid": "0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.958082] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Releasing lock "refresh_cache-b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 603.958626] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance network_info: |[{"id": "0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1", "address": "fa:16:3e:40:1b:9a", "network": {"id": "4c61661a-e3e2-44de-a107-0452067106df", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-143967869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ff5b8556262462b989083d70debb060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a0a6549-a5", "ovs_interfaceid": "0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 603.959855] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:1b:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.974946] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 
tempest-ImagesOneServerTestJSON-1270131940-project-member] Creating folder: Project (0ff5b8556262462b989083d70debb060). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.974946] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58992af0-14f3-490f-974a-105e969a05ca {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.996989] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Created folder: Project (0ff5b8556262462b989083d70debb060) in parent group-v567993. [ 603.997211] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Creating folder: Instances. Parent ref: group-v568018. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.997451] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ba19edf-a0fb-474c-bd09-9cd0ef16bd12 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.012064] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Created folder: Instances in parent group-v568018. [ 604.012345] env[67008]: DEBUG oslo.service.loopingcall [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.012545] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 604.012749] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13b42f77-f480-4d0e-b261-1a1cd53c76dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.034101] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.034101] env[67008]: value = "task-2824850" [ 604.034101] env[67008]: _type = "Task" [ 604.034101] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.043904] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.401391] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Successfully created port: 92b81921-a19a-4cd4-aed4-7c8c2a9217d7 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.544492] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.907566] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Successfully updated port: f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 604.921863] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "refresh_cache-72dc7fb5-e94e-4784-9864-a1731ea7c755" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 604.922070] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired lock "refresh_cache-72dc7fb5-e94e-4784-9864-a1731ea7c755" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 604.922206] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.045404] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.196454] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.382052] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Updated VIF entry in instance network info cache for port e92057e4-be60-4137-85c2-1ae9ba0fb041. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 605.384190] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Updating instance_info_cache with network_info: [{"id": "e92057e4-be60-4137-85c2-1ae9ba0fb041", "address": "fa:16:3e:de:9e:21", "network": {"id": "af516538-57d4-477b-af26-c3c3fead6bcc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-136840625-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d53a5ba5d44a969626b1dc7f15c4d0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c842425c-544e-4ce2-9657-512723bd318e", "external-id": "nsx-vlan-transportzone-80", "segmentation_id": 80, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape92057e4-be", "ovs_interfaceid": "e92057e4-be60-4137-85c2-1ae9ba0fb041", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.402310] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Releasing lock "refresh_cache-24f99c22-49e9-486a-a2d7-a02a8da3f6d3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 605.402565] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Received event network-vif-plugged-d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 605.402757] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquiring lock "5472df57-f2bc-4a90-9251-13760f932d77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 605.402953] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Lock "5472df57-f2bc-4a90-9251-13760f932d77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 605.403121] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Lock "5472df57-f2bc-4a90-9251-13760f932d77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 605.403284] env[67008]: DEBUG
nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] No waiting events found dispatching network-vif-plugged-d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 605.403483] env[67008]: WARNING nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Received unexpected event network-vif-plugged-d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 for instance with vm_state building and task_state spawning. [ 605.403658] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Received event network-changed-d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 605.403808] env[67008]: DEBUG nova.compute.manager [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Refreshing instance network info cache due to event network-changed-d955ae4c-b4ec-4ea0-8cab-bc4eb898d951. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 605.403995] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquiring lock "refresh_cache-5472df57-f2bc-4a90-9251-13760f932d77" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 605.404160] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Acquired lock "refresh_cache-5472df57-f2bc-4a90-9251-13760f932d77" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 605.404279] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Refreshing network info cache for port d955ae4c-b4ec-4ea0-8cab-bc4eb898d951 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 605.548647] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.051340] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.141935] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Updating instance_info_cache with network_info: [{"id": "f28dcdcf-9eb7-48c8-9d04-79251c6be6a9", "address": "fa:16:3e:3c:05:4c", "network": {"id": "1a7c14af-11ce-40dd-92f8-ac1a08161d50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1304694986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385f8341f9d34af38bedb3457b646875", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdcf-9e", "ovs_interfaceid": "f28dcdcf-9eb7-48c8-9d04-79251c6be6a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.161098] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Releasing lock "refresh_cache-72dc7fb5-e94e-4784-9864-a1731ea7c755" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 606.162198] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance network_info: |[{"id": "f28dcdcf-9eb7-48c8-9d04-79251c6be6a9", "address": "fa:16:3e:3c:05:4c", "network": {"id": "1a7c14af-11ce-40dd-92f8-ac1a08161d50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1304694986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385f8341f9d34af38bedb3457b646875", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdcf-9e", "ovs_interfaceid": "f28dcdcf-9eb7-48c8-9d04-79251c6be6a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 606.162296] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:05:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f28dcdcf-9eb7-48c8-9d04-79251c6be6a9', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.170471] env[67008]: DEBUG oslo.service.loopingcall [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.171572] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 606.172053] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-194e585b-0af4-411b-ba78-35918e74e475 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.195935] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.195935] env[67008]: value = "task-2824852" [ 606.195935] env[67008]: _type = "Task" [ 606.195935] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.210306] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824852, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.456862] env[67008]: DEBUG nova.compute.manager [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Received event network-vif-plugged-0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 606.457108] env[67008]: DEBUG oslo_concurrency.lockutils [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] Acquiring lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 606.457592] env[67008]: DEBUG oslo_concurrency.lockutils [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 606.457862] env[67008]: DEBUG oslo_concurrency.lockutils [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 606.458075] env[67008]: DEBUG nova.compute.manager [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] No waiting events found dispatching network-vif-plugged-0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 606.458289] env[67008]: WARNING nova.compute.manager [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Received unexpected event network-vif-plugged-0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 for instance with vm_state building and task_state spawning. [ 606.458470] env[67008]: DEBUG nova.compute.manager [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Received event network-changed-0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 606.458627] env[67008]: DEBUG nova.compute.manager [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Refreshing instance network info cache due to event network-changed-0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1.
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 606.458845] env[67008]: DEBUG oslo_concurrency.lockutils [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] Acquiring lock "refresh_cache-b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 606.459054] env[67008]: DEBUG oslo_concurrency.lockutils [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] Acquired lock "refresh_cache-b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 606.459122] env[67008]: DEBUG nova.network.neutron [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Refreshing network info cache for port 0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 606.548225] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.713303] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824852, 'name': CreateVM_Task, 'duration_secs': 0.367078} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.713564] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 606.714376] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 606.714586] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 606.714980] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 606.715280] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5b344bd-09c3-46c1-8531-667718b30c15 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.720966] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 606.720966] env[67008]: value = 
"session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c7db69-1104-5c2b-81a8-64f914e5e502" [ 606.720966] env[67008]: _type = "Task" [ 606.720966] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.731056] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c7db69-1104-5c2b-81a8-64f914e5e502, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.899361] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Updated VIF entry in instance network info cache for port d955ae4c-b4ec-4ea0-8cab-bc4eb898d951. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 606.899568] env[67008]: DEBUG nova.network.neutron [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Updating instance_info_cache with network_info: [{"id": "d955ae4c-b4ec-4ea0-8cab-bc4eb898d951", "address": "fa:16:3e:19:b8:91", "network": {"id": "1a7c14af-11ce-40dd-92f8-ac1a08161d50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1304694986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385f8341f9d34af38bedb3457b646875", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd955ae4c-b4", "ovs_interfaceid": "d955ae4c-b4ec-4ea0-8cab-bc4eb898d951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.915655] env[67008]: DEBUG oslo_concurrency.lockutils [req-b12b00dd-d05d-4533-a200-5e4425d2fd3a req-6f7c1f6e-78a7-4ed7-9585-cd0d342cb381 service nova] Releasing lock "refresh_cache-5472df57-f2bc-4a90-9251-13760f932d77" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 607.048503] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 607.238747] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 607.239902] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 607.239902] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 607.405949] env[67008]: DEBUG nova.network.neutron [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Updated VIF entry in instance network info cache for port 0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 607.408900] env[67008]: DEBUG nova.network.neutron [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Updating instance_info_cache with network_info: [{"id": "0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1", "address": "fa:16:3e:40:1b:9a", "network": {"id": "4c61661a-e3e2-44de-a107-0452067106df", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-143967869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ff5b8556262462b989083d70debb060", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a0a6549-a5", "ovs_interfaceid": "0a0a6549-a5ba-40cb-bdf4-8cffc91a2df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 607.426952] env[67008]: DEBUG oslo_concurrency.lockutils [req-62102baf-1a92-4b52-9eb9-0cc523dd6e10 req-f7906c81-5a16-4756-a39f-dae4765e75c7 service nova] Releasing lock "refresh_cache-b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 607.551244] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 607.760415] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Successfully updated port: 92b81921-a19a-4cd4-aed4-7c8c2a9217d7 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 607.782633] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "refresh_cache-6ca0b308-d3d1-49bd-8ce1-813017b3c833" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 607.782633] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquired lock "refresh_cache-6ca0b308-d3d1-49bd-8ce1-813017b3c833" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 607.782633] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 607.884728] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 608.050208] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task} progress is 99%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 608.451620] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Updating instance_info_cache with network_info: [{"id": "92b81921-a19a-4cd4-aed4-7c8c2a9217d7", "address": "fa:16:3e:30:5b:14", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b81921-a1", "ovs_interfaceid": "92b81921-a19a-4cd4-aed4-7c8c2a9217d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 608.469275] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Releasing lock "refresh_cache-6ca0b308-d3d1-49bd-8ce1-813017b3c833" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 608.472043] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance network_info: |[{"id": "92b81921-a19a-4cd4-aed4-7c8c2a9217d7", "address": "fa:16:3e:30:5b:14", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b81921-a1", "ovs_interfaceid": "92b81921-a19a-4cd4-aed4-7c8c2a9217d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}}
[ 608.472193] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:5b:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92b81921-a19a-4cd4-aed4-7c8c2a9217d7', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 608.479812] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Creating folder: Project (7f58ceaf700d4a49b5e9680e344c808f). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 608.480673] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a112f65-6648-4d58-9a70-50a8efd3fb1e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.496606] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Created folder: Project (7f58ceaf700d4a49b5e9680e344c808f) in parent group-v567993.
[ 608.497686] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Creating folder: Instances. Parent ref: group-v568022. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 608.498093] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06500e71-25e6-4beb-9359-8242d94d28a9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.510064] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Created folder: Instances in parent group-v568022.
[ 608.510311] env[67008]: DEBUG oslo.service.loopingcall [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 608.510500] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 608.510701] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-675cec9a-73f4-449d-a544-01882a56552c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.531955] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 608.531955] env[67008]: value = "task-2824856"
[ 608.531955] env[67008]: _type = "Task"
[ 608.531955] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 608.539693] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824856, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 608.547763] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824850, 'name': CreateVM_Task, 'duration_secs': 4.322271} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 608.547883] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 608.548505] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 608.548672] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 608.549077] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}}
[ 608.549247] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c5e4771-7de3-4be1-81b7-42ff41c57486 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 608.554260] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Waiting for the task: (returnval){
[ 608.554260] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520c800b-47d2-255b-5f0b-c50f27ecde52"
[ 608.554260] env[67008]: _type = "Task"
[ 608.554260] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 608.563480] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520c800b-47d2-255b-5f0b-c50f27ecde52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 609.042463] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824856, 'name': CreateVM_Task, 'duration_secs': 0.336694} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 609.042550] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 609.043429] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 609.064256] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 609.064398] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 609.064521] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 609.064734] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 609.065051] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}}
[ 609.065307] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-497d07ee-9bdd-4a8b-829b-091e6aa5031b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.072926] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Waiting for the task: (returnval){
[ 609.072926] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5295b8dd-0b0f-ed37-1b7a-5e84e40db149"
[ 609.072926] env[67008]: _type = "Task"
[ 609.072926] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 609.079069] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5295b8dd-0b0f-ed37-1b7a-5e84e40db149, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 609.529120] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 609.529120] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 609.589399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 609.589457] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 609.589660] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 610.013146] env[67008]: DEBUG nova.compute.manager [req-ebcb74fb-56ca-41f0-972e-64ebc61b4d46 req-7b2636dc-1611-4202-80e5-7a5dade24df0 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Received event network-vif-plugged-92b81921-a19a-4cd4-aed4-7c8c2a9217d7 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}}
[ 610.013587] env[67008]: DEBUG oslo_concurrency.lockutils [req-ebcb74fb-56ca-41f0-972e-64ebc61b4d46 req-7b2636dc-1611-4202-80e5-7a5dade24df0 service nova] Acquiring lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 610.013587] env[67008]: DEBUG oslo_concurrency.lockutils [req-ebcb74fb-56ca-41f0-972e-64ebc61b4d46 req-7b2636dc-1611-4202-80e5-7a5dade24df0 service nova] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 610.014955] env[67008]: DEBUG oslo_concurrency.lockutils [req-ebcb74fb-56ca-41f0-972e-64ebc61b4d46 req-7b2636dc-1611-4202-80e5-7a5dade24df0 service nova] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 610.015323] env[67008]: DEBUG nova.compute.manager [req-ebcb74fb-56ca-41f0-972e-64ebc61b4d46 req-7b2636dc-1611-4202-80e5-7a5dade24df0 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] No waiting events found dispatching network-vif-plugged-92b81921-a19a-4cd4-aed4-7c8c2a9217d7 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 610.015372] env[67008]: WARNING nova.compute.manager [req-ebcb74fb-56ca-41f0-972e-64ebc61b4d46 req-7b2636dc-1611-4202-80e5-7a5dade24df0 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Received unexpected event network-vif-plugged-92b81921-a19a-4cd4-aed4-7c8c2a9217d7 for instance with vm_state building and task_state spawning.
[ 610.103972] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 610.103972] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 610.655052] env[67008]: DEBUG nova.compute.manager [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Received event network-vif-plugged-f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}}
[ 610.655359] env[67008]: DEBUG oslo_concurrency.lockutils [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] Acquiring lock "72dc7fb5-e94e-4784-9864-a1731ea7c755-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 610.655359] env[67008]: DEBUG oslo_concurrency.lockutils [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 610.655483] env[67008]: DEBUG oslo_concurrency.lockutils [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 610.655623] env[67008]: DEBUG nova.compute.manager [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] No waiting events found dispatching network-vif-plugged-f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 610.655783] env[67008]: WARNING nova.compute.manager [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Received unexpected event network-vif-plugged-f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 for instance with vm_state building and task_state spawning.
[ 610.655936] env[67008]: DEBUG nova.compute.manager [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Received event network-changed-f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 610.656147] env[67008]: DEBUG nova.compute.manager [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Refreshing instance network info cache due to event network-changed-f28dcdcf-9eb7-48c8-9d04-79251c6be6a9. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 610.656290] env[67008]: DEBUG oslo_concurrency.lockutils [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] Acquiring lock "refresh_cache-72dc7fb5-e94e-4784-9864-a1731ea7c755" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 610.656425] env[67008]: DEBUG oslo_concurrency.lockutils [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] Acquired lock "refresh_cache-72dc7fb5-e94e-4784-9864-a1731ea7c755" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 610.656570] env[67008]: DEBUG nova.network.neutron [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Refreshing network info cache for port f28dcdcf-9eb7-48c8-9d04-79251c6be6a9 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 610.779348] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 610.779348] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 611.555992] env[67008]: DEBUG nova.network.neutron [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Updated VIF entry in instance network info cache for port f28dcdcf-9eb7-48c8-9d04-79251c6be6a9. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 611.556367] env[67008]: DEBUG nova.network.neutron [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Updating instance_info_cache with network_info: [{"id": "f28dcdcf-9eb7-48c8-9d04-79251c6be6a9", "address": "fa:16:3e:3c:05:4c", "network": {"id": "1a7c14af-11ce-40dd-92f8-ac1a08161d50", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1304694986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385f8341f9d34af38bedb3457b646875", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf28dcdcf-9e", "ovs_interfaceid": "f28dcdcf-9eb7-48c8-9d04-79251c6be6a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.572216] env[67008]: DEBUG oslo_concurrency.lockutils [req-7c1536d1-3dad-45a3-bd2b-79e083cd266b req-e8b24d9f-3585-43c1-8b43-83fe4f85dbb8 service nova] Releasing lock "refresh_cache-72dc7fb5-e94e-4784-9864-a1731ea7c755" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 611.988016] env[67008]: DEBUG oslo_concurrency.lockutils [None req-974091af-24dd-45c7-9716-f72a2a98ab7f tempest-VolumesAssistedSnapshotsTest-588891309 tempest-VolumesAssistedSnapshotsTest-588891309-project-member] Acquiring lock "782f3bee-42b3-4822-a28d-9eb8a6cde1ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 611.988306] env[67008]: DEBUG oslo_concurrency.lockutils [None req-974091af-24dd-45c7-9716-f72a2a98ab7f tempest-VolumesAssistedSnapshotsTest-588891309 tempest-VolumesAssistedSnapshotsTest-588891309-project-member] Lock "782f3bee-42b3-4822-a28d-9eb8a6cde1ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 612.676106] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5034493d-17d0-4d29-9d2e-64cadd0f7655 tempest-ImagesNegativeTestJSON-1485666449 tempest-ImagesNegativeTestJSON-1485666449-project-member] Acquiring lock "adc51b71-5867-43f2-a947-62e8e733db76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 612.676464] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5034493d-17d0-4d29-9d2e-64cadd0f7655 tempest-ImagesNegativeTestJSON-1485666449 
tempest-ImagesNegativeTestJSON-1485666449-project-member] Lock "adc51b71-5867-43f2-a947-62e8e733db76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 613.467397] env[67008]: DEBUG nova.compute.manager [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Received event network-changed-92b81921-a19a-4cd4-aed4-7c8c2a9217d7 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 613.467397] env[67008]: DEBUG nova.compute.manager [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Refreshing instance network info cache due to event network-changed-92b81921-a19a-4cd4-aed4-7c8c2a9217d7. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 613.467397] env[67008]: DEBUG oslo_concurrency.lockutils [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] Acquiring lock "refresh_cache-6ca0b308-d3d1-49bd-8ce1-813017b3c833" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 613.467397] env[67008]: DEBUG oslo_concurrency.lockutils [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] Acquired lock "refresh_cache-6ca0b308-d3d1-49bd-8ce1-813017b3c833" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 613.467397] env[67008]: DEBUG nova.network.neutron [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Refreshing network info cache for port 92b81921-a19a-4cd4-aed4-7c8c2a9217d7 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 613.994205] env[67008]: DEBUG nova.network.neutron [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Updated VIF entry in instance network info cache for port 92b81921-a19a-4cd4-aed4-7c8c2a9217d7. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 613.994436] env[67008]: DEBUG nova.network.neutron [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Updating instance_info_cache with network_info: [{"id": "92b81921-a19a-4cd4-aed4-7c8c2a9217d7", "address": "fa:16:3e:30:5b:14", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.160", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b81921-a1", "ovs_interfaceid": "92b81921-a19a-4cd4-aed4-7c8c2a9217d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.005410] env[67008]: DEBUG oslo_concurrency.lockutils [req-3327514e-82c3-4e95-aff1-c0a3785bf2b8 req-af951ff5-1fa0-47c4-b757-1fc6b3aa4db4 service nova] Releasing lock "refresh_cache-6ca0b308-d3d1-49bd-8ce1-813017b3c833" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 614.099526] env[67008]: DEBUG oslo_concurrency.lockutils [None req-74cb615e-3297-4bbb-ba17-228c2c907cc6 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "2fef1490-9ac5-4246-8017-f68e512c51dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 614.099762] env[67008]: DEBUG oslo_concurrency.lockutils [None req-74cb615e-3297-4bbb-ba17-228c2c907cc6 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "2fef1490-9ac5-4246-8017-f68e512c51dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 617.812728] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1565f0ba-6ad2-42a8-b11e-a289dcb589d1 tempest-ServersWithSpecificFlavorTestJSON-1136835190 tempest-ServersWithSpecificFlavorTestJSON-1136835190-project-member] Acquiring lock "70e0c15e-d968-46ba-bb97-35d6687e9834" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 617.812728] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1565f0ba-6ad2-42a8-b11e-a289dcb589d1 tempest-ServersWithSpecificFlavorTestJSON-1136835190 tempest-ServersWithSpecificFlavorTestJSON-1136835190-project-member] Lock 
"70e0c15e-d968-46ba-bb97-35d6687e9834" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 620.756441] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2af9868f-dfeb-44fc-a689-07f0d19bc7f0 tempest-ServersAdmin275Test-1466662230 tempest-ServersAdmin275Test-1466662230-project-member] Acquiring lock "430ea81b-71f9-4074-829a-fd8a6c24098b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 620.757894] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2af9868f-dfeb-44fc-a689-07f0d19bc7f0 tempest-ServersAdmin275Test-1466662230 tempest-ServersAdmin275Test-1466662230-project-member] Lock "430ea81b-71f9-4074-829a-fd8a6c24098b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 621.353058] env[67008]: DEBUG oslo_concurrency.lockutils [None req-fbba38aa-bfa2-4459-ab5d-354d5f161600 tempest-ServersTestJSON-1031739700 tempest-ServersTestJSON-1031739700-project-member] Acquiring lock "83b7b19d-bc72-4ac2-992d-9dda68ff1e4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 621.353294] env[67008]: DEBUG oslo_concurrency.lockutils [None req-fbba38aa-bfa2-4459-ab5d-354d5f161600 tempest-ServersTestJSON-1031739700 tempest-ServersTestJSON-1031739700-project-member] Lock "83b7b19d-bc72-4ac2-992d-9dda68ff1e4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 622.002338] env[67008]: DEBUG oslo_concurrency.lockutils [None req-92132d17-e5fd-412e-84cc-20d0960e7d37 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] Acquiring lock "90f7c936-6d03-4464-8719-12ab257cb714" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 622.002630] env[67008]: DEBUG oslo_concurrency.lockutils [None req-92132d17-e5fd-412e-84cc-20d0960e7d37 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] Lock "90f7c936-6d03-4464-8719-12ab257cb714" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 623.658776] env[67008]: DEBUG oslo_concurrency.lockutils [None req-941774ce-1893-4b38-8692-be5227511fcb tempest-ServersTestBootFromVolume-343587925 tempest-ServersTestBootFromVolume-343587925-project-member] Acquiring lock "e5c8de12-61ca-4bc1-b871-b84cf802e916" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 623.659094] env[67008]: 
DEBUG oslo_concurrency.lockutils [None req-941774ce-1893-4b38-8692-be5227511fcb tempest-ServersTestBootFromVolume-343587925 tempest-ServersTestBootFromVolume-343587925-project-member] Lock "e5c8de12-61ca-4bc1-b871-b84cf802e916" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 631.957251] env[67008]: DEBUG oslo_concurrency.lockutils [None req-41f894d9-4a9e-47dd-b96f-0ee805883b92 tempest-ServerRescueTestJSONUnderV235-1962325397 tempest-ServerRescueTestJSONUnderV235-1962325397-project-member] Acquiring lock "cf29c18a-c923-46c8-ab02-277c2b5ee4d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 631.957251] env[67008]: DEBUG oslo_concurrency.lockutils [None req-41f894d9-4a9e-47dd-b96f-0ee805883b92 tempest-ServerRescueTestJSONUnderV235-1962325397 tempest-ServerRescueTestJSONUnderV235-1962325397-project-member] Lock "cf29c18a-c923-46c8-ab02-277c2b5ee4d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 634.475648] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3a3e81f2-1f50-4e35-90a0-6e01a7e1864c tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] Acquiring lock "35b9e3ec-c3da-4805-9c0f-7f772d7a3747" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 634.475952] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3a3e81f2-1f50-4e35-90a0-6e01a7e1864c tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] Lock "35b9e3ec-c3da-4805-9c0f-7f772d7a3747" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 637.386558] env[67008]: WARNING oslo_vmware.rw_handles [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 
287, in _read_status [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 637.386558] env[67008]: ERROR oslo_vmware.rw_handles [ 637.391351] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 637.393568] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 637.394015] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Copying Virtual Disk [datastore1] vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/c0cad18f-123f-49d7-a7c5-db01c68809ce/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 637.394535] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e80d7d58-d591-4b4f-a58b-79deb55b9850 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.405018] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Waiting for the task: (returnval){ [ 637.405018] env[67008]: value = "task-2824862" [ 637.405018] env[67008]: _type = "Task" [ 637.405018] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.415093] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Task: {'id': task-2824862, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.920015] env[67008]: DEBUG oslo_vmware.exceptions [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 637.920015] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 637.922705] env[67008]: ERROR nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 637.922705] env[67008]: Faults: ['InvalidArgument'] [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Traceback (most recent call last): [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] yield resources [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self.driver.spawn(context, instance, image_meta, [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self._fetch_image_if_missing(context, vi) [ 637.922705] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] image_cache(vi, tmp_image_ds_loc) [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] vm_util.copy_virtual_disk( [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] session._wait_for_task(vmdk_copy_task) [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] return self.wait_for_task(task_ref) [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] return evt.wait() [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] result = hub.switch() [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 637.923134] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] return self.greenlet.switch() [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self.f(*self.args, **self.kw) [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] raise exceptions.translate_fault(task_info.error) [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Faults: ['InvalidArgument'] [ 637.923487] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] [ 637.923487] env[67008]: INFO nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Terminating instance [ 637.925554] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 637.925554] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.926467] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Start destroying the 
instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 637.926467] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 637.926467] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7f6f5bb-e1a3-4f7b-8834-06b586c988d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.928834] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13fe826-8fcc-41a2-aaac-57e4cb0005c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.938549] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 637.938736] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd58f1ee-d10d-49f0-8f7c-0119c90b8d76 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.941304] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.941560] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 637.944892] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16ba21f3-f5d0-4886-9bf3-f7810e786b72 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.948044] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 637.948044] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5215268e-ee7b-c2da-1474-02e9bafee70c" [ 637.948044] env[67008]: _type = "Task" [ 637.948044] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.958020] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5215268e-ee7b-c2da-1474-02e9bafee70c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.012760] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 638.012982] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 638.013210] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Deleting the datastore file [datastore1] 8724bca5-56d0-4e6e-a178-4f3634d37007 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 638.013603] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c238eba-c14e-41b6-89ff-5e5e595df626 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.025090] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Waiting for the task: (returnval){ [ 638.025090] env[67008]: value = "task-2824864" [ 638.025090] env[67008]: _type = "Task" [ 638.025090] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.039165] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Task: {'id': task-2824864, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.462205] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 638.462205] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.462205] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7e9edc8-c69a-4185-afe7-106f84cf43ee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.475804] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.476054] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Fetch image to [datastore1] vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 638.476312] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 638.477273] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbcb82d-4e47-4bf3-9a4d-1ca138a2efd4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.489622] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1de3e22-4cd7-4f43-9578-bbfd76b49baa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.499944] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c033f1-e755-4333-8190-afc8f5a3700c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.543997] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b62a8f2-1ead-4bdb-a1b7-0005d9b56879 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.551236] env[67008]: DEBUG oslo_vmware.api [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Task: {'id': task-2824864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075319} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.553615] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 638.553615] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 638.553615] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 638.553615] env[67008]: INFO nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Took 0.63 seconds to destroy the instance on the hypervisor. 
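The DeleteDatastoreFile_Task exchange above (wait_for_task at oslo_vmware/api.py:397, progress polling at :434, completion at :444) is the same poll-until-done pattern every vSphere task in this log goes through, including the CopyVirtualDisk task that fails just below. A minimal sketch of that pattern, assuming a hypothetical get_task_info() accessor and simplified state names; the real oslo.vmware loop runs inside an eventlet looping call and translates task faults into VimFaultException (api.py:448 in the traceback that follows):

    import time

    class TaskFailed(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(task_ref, get_task_info, interval=0.5):
        # Poll the task until it reaches a terminal state.
        while True:
            info = get_task_info(task_ref)   # one server round trip per poll
            if info["state"] == "success":
                return info.get("result")    # logged as "completed successfully"
            if info["state"] == "error":
                # e.g. "A specified parameter was not correct: fileType"
                raise TaskFailed(info["error"])
            time.sleep(interval)             # queued/running: keep polling
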
[ 638.555930] env[67008]: DEBUG nova.compute.claims [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 638.556111] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 638.556319] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 638.559365] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b1e361a2-49e0-46c1-a2c4-24581b1a54a7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.592029] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 638.657918] env[67008]: DEBUG oslo_vmware.rw_handles [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 638.728330] env[67008]: DEBUG oslo_vmware.rw_handles [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 638.731018] env[67008]: DEBUG oslo_vmware.rw_handles [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 639.200156] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a54b57-f983-4da2-be72-905620a00524 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.208066] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78563752-a592-4a26-93f4-780e28673265 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.247862] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd43d4e-a3c4-4778-b110-7d67e39fac43 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.260728] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d044bdb-12a0-4431-b07c-06b5a1708680 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.281058] env[67008]: DEBUG nova.compute.provider_tree [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.296807] env[67008]: DEBUG nova.scheduler.client.report [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 639.313998] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.757s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 639.315348] env[67008]: ERROR nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 639.315348] env[67008]: Faults: ['InvalidArgument'] [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Traceback (most recent call last): [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 639.315348] env[67008]: ERROR nova.compute.manager 
[instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self.driver.spawn(context, instance, image_meta, [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self._vmops.spawn(context, instance, image_meta, injected_files, [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self._fetch_image_if_missing(context, vi) [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] image_cache(vi, tmp_image_ds_loc) [ 639.315348] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] vm_util.copy_virtual_disk( [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] session._wait_for_task(vmdk_copy_task) [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] return self.wait_for_task(task_ref) [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] return evt.wait() [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] result = hub.switch() [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] return self.greenlet.switch() [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 639.315824] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] self.f(*self.args, **self.kw) [ 639.316369] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 639.316369] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] raise exceptions.translate_fault(task_info.error) [ 639.316369] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 639.316369] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Faults: ['InvalidArgument'] [ 639.316369] env[67008]: ERROR nova.compute.manager [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] [ 639.316369] env[67008]: DEBUG nova.compute.utils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 639.324957] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Build of instance 8724bca5-56d0-4e6e-a178-4f3634d37007 was re-scheduled: A specified parameter was not correct: fileType [ 639.324957] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 639.325578] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 639.326166] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 639.326166] env[67008]: DEBUG nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 639.326292] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 640.098688] env[67008]: DEBUG nova.network.neutron [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.118094] env[67008]: INFO nova.compute.manager [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] [instance: 8724bca5-56d0-4e6e-a178-4f3634d37007] Took 0.79 seconds to deallocate network for instance. [ 640.252883] env[67008]: INFO nova.scheduler.client.report [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Deleted allocations for instance 8724bca5-56d0-4e6e-a178-4f3634d37007 [ 640.282628] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b1f8b9ed-9232-4455-9673-26a2c82a2ce9 tempest-ServerDiagnosticsTest-102411225 tempest-ServerDiagnosticsTest-102411225-project-member] Lock "8724bca5-56d0-4e6e-a178-4f3634d37007" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.589s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 640.312090] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 640.392844] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 640.392844] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 640.394057] env[67008]: INFO nova.compute.claims [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.881124] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea429df-df58-4b51-a767-09f2edb01ad7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.889205] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ebf0e7-2bf6-4caf-851b-159d32d3ab43 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.922294] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb77a7fb-1e01-4737-bd69-87cdc589e54b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.932320] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11452efb-cb5d-44b5-8c25-a0c0918e9bcf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.949321] env[67008]: DEBUG nova.compute.provider_tree [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.961245] env[67008]: DEBUG nova.scheduler.client.report [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
640.984649] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.592s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 640.985202] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 641.032875] env[67008]: DEBUG nova.compute.utils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.034173] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 641.034353] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 641.044317] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 641.127713] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 641.144747] env[67008]: DEBUG nova.policy [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a652dd6123245bb80c43e44f9aa0e18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd07d3e56959e4656b4b5b221332ac173', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 641.156111] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 641.156111] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 641.156111] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.156428] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 641.156428] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.156428] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 641.156428] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 641.156428] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 641.156600] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 641.156600] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 641.156600] env[67008]: DEBUG nova.virt.hardware [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 641.156600] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad9fb38-6d23-4696-9ce5-22dcf5ebb501 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.167613] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1745a37-080b-4374-8859-6412cb3ed30c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.680258] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 641.680489] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 641.918940] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 
tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Successfully created port: 7f7e423b-39e3-4251-b8e6-764e1c967943 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.242773] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ecca436e-9738-4024-87e1-73f2325d675e tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Acquiring lock "04badd98-b2f0-483d-82e9-5806dbf8edb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 642.243143] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ecca436e-9738-4024-87e1-73f2325d675e tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "04badd98-b2f0-483d-82e9-5806dbf8edb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 643.298932] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Successfully updated port: 7f7e423b-39e3-4251-b8e6-764e1c967943 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 643.314867] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "refresh_cache-40a26f4e-0be9-4770-83a7-31c87dbf921f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 643.315023] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquired lock "refresh_cache-40a26f4e-0be9-4770-83a7-31c87dbf921f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 643.315153] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.375551] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.753032] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Updating instance_info_cache with network_info: [{"id": "7f7e423b-39e3-4251-b8e6-764e1c967943", "address": "fa:16:3e:da:bf:de", "network": {"id": "8123d6cd-08ca-4ba2-960b-a0646cbeb278", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-867262659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d07d3e56959e4656b4b5b221332ac173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7e423b-39", "ovs_interfaceid": "7f7e423b-39e3-4251-b8e6-764e1c967943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.771957] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Releasing lock "refresh_cache-40a26f4e-0be9-4770-83a7-31c87dbf921f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 643.772649] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance network_info: |[{"id": "7f7e423b-39e3-4251-b8e6-764e1c967943", "address": "fa:16:3e:da:bf:de", "network": {"id": "8123d6cd-08ca-4ba2-960b-a0646cbeb278", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-867262659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d07d3e56959e4656b4b5b221332ac173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7e423b-39", "ovs_interfaceid": "7f7e423b-39e3-4251-b8e6-764e1c967943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 643.774674] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:bf:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '81d39ad2-4e62-4f09-a567-88ac5aa70467', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f7e423b-39e3-4251-b8e6-764e1c967943', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.782158] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Creating folder: Project (d07d3e56959e4656b4b5b221332ac173). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 643.782926] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0cb5cc69-b874-46d4-abf4-442435c0c996 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.793651] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Created folder: Project (d07d3e56959e4656b4b5b221332ac173) in parent group-v567993. [ 643.794130] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Creating folder: Instances. Parent ref: group-v568026. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 643.794443] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6015c85f-e607-4c9e-b600-b3f5b80c10f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.805129] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Created folder: Instances in parent group-v568026. [ 643.805384] env[67008]: DEBUG oslo.service.loopingcall [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.805617] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 643.805834] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0df0c689-2ba3-448e-a502-4d3c196486c1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.828342] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.828342] env[67008]: value = "task-2824867" [ 643.828342] env[67008]: _type = "Task" [ 643.828342] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.838853] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824867, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.294827] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aeac8b4b-1157-44dd-b431-f96eba868b0c tempest-AttachInterfacesV270Test-777158603 tempest-AttachInterfacesV270Test-777158603-project-member] Acquiring lock "72df790c-0cd5-4054-9162-9f9bd3d19239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 644.294827] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aeac8b4b-1157-44dd-b431-f96eba868b0c tempest-AttachInterfacesV270Test-777158603 tempest-AttachInterfacesV270Test-777158603-project-member] Lock "72df790c-0cd5-4054-9162-9f9bd3d19239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 644.343200] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824867, 'name': CreateVM_Task, 'duration_secs': 0.362392} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.343449] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 644.345401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 644.345401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 644.345401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 644.345932] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f38168df-7d0e-4ebf-89d9-0fa950b51066 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.350880] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Waiting for the task: (returnval){ [ 644.350880] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]528d8bf7-ea2f-a82f-cdfb-3bdcc73405af" [ 644.350880] env[67008]: _type = "Task" [ 644.350880] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.359363] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]528d8bf7-ea2f-a82f-cdfb-3bdcc73405af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.860946] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 644.863378] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.863378] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 645.148877] env[67008]: DEBUG nova.compute.manager [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Received event network-vif-plugged-7f7e423b-39e3-4251-b8e6-764e1c967943 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 645.149101] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] Acquiring lock "40a26f4e-0be9-4770-83a7-31c87dbf921f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 645.149744] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 645.149986] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 645.150186] env[67008]: DEBUG nova.compute.manager [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] No waiting events found dispatching network-vif-plugged-7f7e423b-39e3-4251-b8e6-764e1c967943 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 645.150883] env[67008]: WARNING nova.compute.manager [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Received 
unexpected event network-vif-plugged-7f7e423b-39e3-4251-b8e6-764e1c967943 for instance with vm_state building and task_state spawning. [ 645.152927] env[67008]: DEBUG nova.compute.manager [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Received event network-changed-7f7e423b-39e3-4251-b8e6-764e1c967943 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 645.153298] env[67008]: DEBUG nova.compute.manager [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Refreshing instance network info cache due to event network-changed-7f7e423b-39e3-4251-b8e6-764e1c967943. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 645.153619] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] Acquiring lock "refresh_cache-40a26f4e-0be9-4770-83a7-31c87dbf921f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 645.153706] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] Acquired lock "refresh_cache-40a26f4e-0be9-4770-83a7-31c87dbf921f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 645.153895] env[67008]: DEBUG nova.network.neutron [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Refreshing network info cache for port 7f7e423b-39e3-4251-b8e6-764e1c967943 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 645.982195] env[67008]: DEBUG nova.network.neutron [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Updated VIF entry in instance network info cache for port 7f7e423b-39e3-4251-b8e6-764e1c967943. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 645.982621] env[67008]: DEBUG nova.network.neutron [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Updating instance_info_cache with network_info: [{"id": "7f7e423b-39e3-4251-b8e6-764e1c967943", "address": "fa:16:3e:da:bf:de", "network": {"id": "8123d6cd-08ca-4ba2-960b-a0646cbeb278", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-867262659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d07d3e56959e4656b4b5b221332ac173", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "81d39ad2-4e62-4f09-a567-88ac5aa70467", "external-id": "nsx-vlan-transportzone-221", "segmentation_id": 221, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7e423b-39", "ovs_interfaceid": "7f7e423b-39e3-4251-b8e6-764e1c967943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.993135] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c6098bd-b604-41a7-b641-bdd98734915b req-7a045026-e006-4faf-90a6-d522f5fe20b9 service nova] Releasing lock "refresh_cache-40a26f4e-0be9-4770-83a7-31c87dbf921f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 646.576393] env[67008]: DEBUG oslo_concurrency.lockutils [None req-48b61275-1700-43a6-9cd3-acbce1e16a8e tempest-ServerAddressesTestJSON-1658310963 tempest-ServerAddressesTestJSON-1658310963-project-member] Acquiring lock "cb387fbf-affd-4d9f-a4ef-7eef58847130" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 646.576575] env[67008]: DEBUG oslo_concurrency.lockutils [None req-48b61275-1700-43a6-9cd3-acbce1e16a8e tempest-ServerAddressesTestJSON-1658310963 tempest-ServerAddressesTestJSON-1658310963-project-member] Lock "cb387fbf-affd-4d9f-a4ef-7eef58847130" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 648.830716] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f3ba1226-5924-4d15-a289-c6ee3ce5f4c9 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "30fbf80d-2893-49cb-b4e8-456d08ce4e3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 648.831025] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f3ba1226-5924-4d15-a289-c6ee3ce5f4c9 tempest-DeleteServersTestJSON-1133260470 
tempest-DeleteServersTestJSON-1133260470-project-member] Lock "30fbf80d-2893-49cb-b4e8-456d08ce4e3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 652.331155] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.359352] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.359546] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.359698] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.359838] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.359981] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 652.377123] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 652.377409] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 652.377605] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 652.377980] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 652.379783] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8acd16-2659-4073-a703-ed2c830f863e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.390322] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4327b20-50d1-4af3-83c6-34b1784d2e4f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.409455] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff53f32b-ab1d-4664-ae3f-bfc574999453 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.417614] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc36e55-c740-4257-81a7-9818c261d71d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.452608] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181041MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 652.452911] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 652.453151] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 652.564901] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 668ce119-c0ee-4996-ae4a-bbe0a788cab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.565131] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5e1de51-40a9-4b43-b885-87501738dc96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.565264] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cc257ece-4b3e-45c8-a1a7-69330848ad89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566297] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566297] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566297] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5472df57-f2bc-4a90-9251-13760f932d77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566297] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566482] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566482] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.566482] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 652.585784] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.616599] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.632712] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.645692] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 782f3bee-42b3-4822-a28d-9eb8a6cde1ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.658215] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance adc51b71-5867-43f2-a947-62e8e733db76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.668606] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2fef1490-9ac5-4246-8017-f68e512c51dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.681454] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 70e0c15e-d968-46ba-bb97-35d6687e9834 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.694230] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 430ea81b-71f9-4074-829a-fd8a6c24098b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.706688] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83b7b19d-bc72-4ac2-992d-9dda68ff1e4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.718018] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 90f7c936-6d03-4464-8719-12ab257cb714 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.728813] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5c8de12-61ca-4bc1-b871-b84cf802e916 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.740520] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cf29c18a-c923-46c8-ab02-277c2b5ee4d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.755535] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 35b9e3ec-c3da-4805-9c0f-7f772d7a3747 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.767322] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.780013] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 04badd98-b2f0-483d-82e9-5806dbf8edb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.793340] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72df790c-0cd5-4054-9162-9f9bd3d19239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.804207] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cb387fbf-affd-4d9f-a4ef-7eef58847130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.821903] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 30fbf80d-2893-49cb-b4e8-456d08ce4e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 652.822172] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 652.822320] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 653.229981] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bb7796-df3a-42fc-806b-7ae0afb25b1d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.238250] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40a4c23-4c83-4690-b045-9d287d9dab6c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.267074] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b76e23-f092-4d71-a072-73c9081de73d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.274381] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4acace-70f2-443e-81e4-9d697442377f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.287286] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: 
ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.296914] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 653.317216] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 653.317407] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.864s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 653.814357] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.814624] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.815192] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 653.815192] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 653.842568] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.842688] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843156] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843156] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843156] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843156] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843610] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843610] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843610] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843610] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 653.843816] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 653.844253] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.844394] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 653.860046] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.975010] env[67008]: DEBUG oslo_concurrency.lockutils [None req-56243726-f421-4a84-ae4c-a9b277a69661 tempest-ServerDiagnosticsV248Test-1232884913 tempest-ServerDiagnosticsV248Test-1232884913-project-member] Acquiring lock "2308fd77-6032-4e59-9074-1c18e9b02d87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 653.975281] env[67008]: DEBUG oslo_concurrency.lockutils [None req-56243726-f421-4a84-ae4c-a9b277a69661 tempest-ServerDiagnosticsV248Test-1232884913 tempest-ServerDiagnosticsV248Test-1232884913-project-member] Lock "2308fd77-6032-4e59-9074-1c18e9b02d87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 659.583896] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b611e079-d5b8-4d6d-83af-d781328f665f tempest-ServerGroupTestJSON-1323841408 tempest-ServerGroupTestJSON-1323841408-project-member] Acquiring lock "80863f1b-9c19-4fcc-8692-6015d623e011" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 659.584263] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b611e079-d5b8-4d6d-83af-d781328f665f tempest-ServerGroupTestJSON-1323841408 tempest-ServerGroupTestJSON-1323841408-project-member] Lock "80863f1b-9c19-4fcc-8692-6015d623e011" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 667.850140] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6af15646-b8de-4866-a2c2-2177785b727b tempest-ServerActionsTestJSON-1407308620 tempest-ServerActionsTestJSON-1407308620-project-member] Acquiring lock "411f1932-4397-4aaa-ab21-55ff90342fbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 667.850481] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6af15646-b8de-4866-a2c2-2177785b727b tempest-ServerActionsTestJSON-1407308620 tempest-ServerActionsTestJSON-1407308620-project-member] Lock "411f1932-4397-4aaa-ab21-55ff90342fbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 684.423719] env[67008]: WARNING oslo_vmware.rw_handles [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 684.423719] env[67008]: 
ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 684.423719] env[67008]: ERROR oslo_vmware.rw_handles [ 684.424314] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 684.425828] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 684.426104] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Copying Virtual Disk [datastore1] vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/3759473f-01bd-4438-992b-93443a0c663f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 684.426408] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea292434-e54c-44ef-a85b-a6a3629a84bb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.435919] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 684.435919] env[67008]: value = "task-2824868" [ 684.435919] env[67008]: _type = "Task" [ 684.435919] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.443912] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824868, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.946647] env[67008]: DEBUG oslo_vmware.exceptions [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 684.946647] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 684.947817] env[67008]: ERROR nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 684.947817] env[67008]: Faults: ['InvalidArgument'] [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Traceback (most recent call last): [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] yield resources [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self.driver.spawn(context, instance, image_meta, [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self._fetch_image_if_missing(context, vi) [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 684.947817] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] image_cache(vi, tmp_image_ds_loc) [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] vm_util.copy_virtual_disk( [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 684.948209] 
env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] session._wait_for_task(vmdk_copy_task) [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] return self.wait_for_task(task_ref) [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] return evt.wait() [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] result = hub.switch() [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] return self.greenlet.switch() [ 684.948209] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 684.948510] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self.f(*self.args, **self.kw) [ 684.948510] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 684.948510] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] raise exceptions.translate_fault(task_info.error) [ 684.948510] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 684.948510] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Faults: ['InvalidArgument'] [ 684.948510] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] [ 684.948510] env[67008]: INFO nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Terminating instance [ 684.948875] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 684.949095] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.949346] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3512b774-0b74-40dc-8030-4a8dcc587cfb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.951728] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 684.951914] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 684.952646] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0120b9-fc82-4892-8f2e-502239bee559 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.959178] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 684.959381] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28818d3b-dbc2-42b9-9e6c-ba80b47e9313 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.961473] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.961638] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 684.962566] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b37708fb-209c-4957-a253-bc85cb192645 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.967258] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Waiting for the task: (returnval){ [ 684.967258] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]521efd7f-fdc4-4ec5-809d-481c268e943e" [ 684.967258] env[67008]: _type = "Task" [ 684.967258] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.981726] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 684.981949] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Creating directory with path [datastore1] vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.982184] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-702a6c76-5105-4311-8519-2927d74eb404 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.994395] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Created directory with path [datastore1] vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.994599] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Fetch image to [datastore1] vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 684.994792] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 684.995550] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8097f5d5-2cd3-4b4d-82a4-1b365439e01b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.003463] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a08b3f-a614-4250-8c7b-341c1ee701c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.012722] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829ea785-b6f4-4704-859b-1c6c6479b6bb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.045100] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ac55df-9995-44d7-a899-381fe2aa5fb1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.047712] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 685.047923] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 685.048112] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleting the datastore file [datastore1] 668ce119-c0ee-4996-ae4a-bbe0a788cab5 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.048345] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1035d4c0-1f32-4944-8176-ae370e4801c4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.054526] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-338bf5e8-dd90-4f15-8bc0-a44f4b779197 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.056217] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 685.056217] env[67008]: value = "task-2824870" [ 685.056217] env[67008]: _type = "Task" [ 685.056217] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.063648] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824870, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.073837] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 685.125301] env[67008]: DEBUG oslo_vmware.rw_handles [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 685.184092] env[67008]: DEBUG oslo_vmware.rw_handles [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 685.184324] env[67008]: DEBUG oslo_vmware.rw_handles [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 685.566066] env[67008]: DEBUG oslo_vmware.api [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090231} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.566365] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.566491] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 685.566656] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.566824] env[67008]: INFO nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Took 0.61 seconds to destroy the instance on the hypervisor. [ 685.568900] env[67008]: DEBUG nova.compute.claims [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 685.569084] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 685.569294] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 685.985294] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0533eb03-242d-4ed5-8df5-af1c887176bc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.992966] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5991f927-95c3-4909-8c62-d1475634a465 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.024036] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1405cabf-04bb-42e8-9981-4fb92fc856e4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.031227] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d87a625-1667-4131-88d4-e83aa2a6650a {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.044199] env[67008]: DEBUG nova.compute.provider_tree [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.052843] env[67008]: DEBUG nova.scheduler.client.report [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 686.067771] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.498s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 686.068310] env[67008]: ERROR nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.068310] env[67008]: Faults: ['InvalidArgument'] [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Traceback (most recent call last): [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self.driver.spawn(context, instance, image_meta, [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self._fetch_image_if_missing(context, vi) [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] image_cache(vi, tmp_image_ds_loc) [ 686.068310] env[67008]: ERROR nova.compute.manager [instance: 
668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] vm_util.copy_virtual_disk( [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] session._wait_for_task(vmdk_copy_task) [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] return self.wait_for_task(task_ref) [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] return evt.wait() [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] result = hub.switch() [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] return self.greenlet.switch() [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 686.068706] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] self.f(*self.args, **self.kw) [ 686.068962] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 686.068962] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] raise exceptions.translate_fault(task_info.error) [ 686.068962] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 686.068962] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Faults: ['InvalidArgument'] [ 686.068962] env[67008]: ERROR nova.compute.manager [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] [ 686.069094] env[67008]: DEBUG nova.compute.utils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 686.070736] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 
668ce119-c0ee-4996-ae4a-bbe0a788cab5] Build of instance 668ce119-c0ee-4996-ae4a-bbe0a788cab5 was re-scheduled: A specified parameter was not correct: fileType [ 686.070736] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 686.071157] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 686.071301] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 686.071473] env[67008]: DEBUG nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 686.071652] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.744140] env[67008]: DEBUG nova.network.neutron [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.754345] env[67008]: INFO nova.compute.manager [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 668ce119-c0ee-4996-ae4a-bbe0a788cab5] Took 0.68 seconds to deallocate network for instance. [ 686.855848] env[67008]: INFO nova.scheduler.client.report [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted allocations for instance 668ce119-c0ee-4996-ae4a-bbe0a788cab5 [ 686.876079] env[67008]: DEBUG oslo_concurrency.lockutils [None req-66d767ff-f79d-4dd7-8777-72d135a65066 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "668ce119-c0ee-4996-ae4a-bbe0a788cab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 107.369s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 686.891166] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Starting instance...
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 686.941158] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 686.941481] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 686.943040] env[67008]: INFO nova.compute.claims [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.346147] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6806702-53d5-436c-8217-4ac75932d897 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.353684] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdefb324-47e5-425a-ad36-783ea8ca6e47 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.383092] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c17e31b-dfec-4a51-a0d2-b91ac0b007ec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.390327] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead96072-bc04-48e4-92d0-d11affe42885 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.403434] env[67008]: DEBUG nova.compute.provider_tree [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.412519] env[67008]: DEBUG nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 687.428234] env[67008]: DEBUG 
oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.487s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 687.428753] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 687.465621] env[67008]: DEBUG nova.compute.utils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.469671] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 687.469671] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 687.478128] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 687.549587] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 687.553066] env[67008]: DEBUG nova.policy [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccd0129db836426e8d0a73278921466c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ecfed59a9d441de898ef80697e3c3b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 687.574169] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 687.574445] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 687.574602] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.574779] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 687.574924] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.575082] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 687.575290] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 687.575446] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 687.575608] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 687.575765] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 687.575929] env[67008]: DEBUG nova.virt.hardware [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 687.576781] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfffe758-aa74-48fb-af0d-3bd7bef0ba20 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.584664] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39a560a-208b-4606-b71f-d0d59e98fd34 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.089280] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Successfully created port: 8c09c9a9-7b9e-4afa-8782-418771dbb105 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.275396] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Successfully updated port: 8c09c9a9-7b9e-4afa-8782-418771dbb105 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.287162] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "refresh_cache-f5fce891-3c35-415e-9d09-c5c8dca3dde3" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 689.288300] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquired lock "refresh_cache-f5fce891-3c35-415e-9d09-c5c8dca3dde3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 689.288691] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 689.363742] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.695934] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Updating instance_info_cache with network_info: [{"id": "8c09c9a9-7b9e-4afa-8782-418771dbb105", "address": "fa:16:3e:56:7a:81", "network": {"id": "565a2ef1-2100-4970-9cfe-4ba2a89e7ce7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1320295138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6ecfed59a9d441de898ef80697e3c3b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c09c9a9-7b", "ovs_interfaceid": "8c09c9a9-7b9e-4afa-8782-418771dbb105", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.715135] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Releasing lock "refresh_cache-f5fce891-3c35-415e-9d09-c5c8dca3dde3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 689.715471] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance network_info: |[{"id": "8c09c9a9-7b9e-4afa-8782-418771dbb105", "address": 
"fa:16:3e:56:7a:81", "network": {"id": "565a2ef1-2100-4970-9cfe-4ba2a89e7ce7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1320295138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6ecfed59a9d441de898ef80697e3c3b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c09c9a9-7b", "ovs_interfaceid": "8c09c9a9-7b9e-4afa-8782-418771dbb105", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 689.715852] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:7a:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '91712705-510f-41a0-a803-2ecd92b676e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c09c9a9-7b9e-4afa-8782-418771dbb105', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.723141] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Creating folder: Project (6ecfed59a9d441de898ef80697e3c3b0). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 689.723665] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d00457c-14f3-4b78-97ad-3460f7d9a660 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.734821] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Created folder: Project (6ecfed59a9d441de898ef80697e3c3b0) in parent group-v567993. [ 689.734996] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Creating folder: Instances. Parent ref: group-v568029. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 689.735225] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a97ad346-420f-4e7e-bce8-cc4ced96634b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.744313] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Created folder: Instances in parent group-v568029. [ 689.744538] env[67008]: DEBUG oslo.service.loopingcall [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.744716] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 689.744906] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59a5916e-9cb5-476f-89f5-4ee43e98f97f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.763411] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.763411] env[67008]: value = "task-2824873" [ 689.763411] env[67008]: _type = "Task" [ 689.763411] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.770678] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824873, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.880385] env[67008]: DEBUG nova.compute.manager [req-9125a384-9235-4d4d-a487-4b9297660221 req-ca147fb2-db72-4952-aab2-9df792bc4109 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Received event network-vif-plugged-8c09c9a9-7b9e-4afa-8782-418771dbb105 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 689.880631] env[67008]: DEBUG oslo_concurrency.lockutils [req-9125a384-9235-4d4d-a487-4b9297660221 req-ca147fb2-db72-4952-aab2-9df792bc4109 service nova] Acquiring lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 689.880834] env[67008]: DEBUG oslo_concurrency.lockutils [req-9125a384-9235-4d4d-a487-4b9297660221 req-ca147fb2-db72-4952-aab2-9df792bc4109 service nova] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 689.881008] env[67008]: DEBUG oslo_concurrency.lockutils [req-9125a384-9235-4d4d-a487-4b9297660221 req-ca147fb2-db72-4952-aab2-9df792bc4109 service nova] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 689.881186] env[67008]: DEBUG nova.compute.manager [req-9125a384-9235-4d4d-a487-4b9297660221 req-ca147fb2-db72-4952-aab2-9df792bc4109 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] No waiting events found dispatching network-vif-plugged-8c09c9a9-7b9e-4afa-8782-418771dbb105 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 689.881346] env[67008]: WARNING nova.compute.manager [req-9125a384-9235-4d4d-a487-4b9297660221 req-ca147fb2-db72-4952-aab2-9df792bc4109 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Received unexpected event network-vif-plugged-8c09c9a9-7b9e-4afa-8782-418771dbb105 for instance with vm_state building and task_state spawning. [ 690.115667] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "95604dd8-b797-440e-a844-af44609faa61" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 690.115927] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "95604dd8-b797-440e-a844-af44609faa61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 690.274685] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824873, 'name': CreateVM_Task, 'duration_secs': 0.340186} completed successfully.
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.274907] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 690.275617] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 690.275839] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 690.276110] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 690.276362] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fa2c430-6a16-430c-9e41-60130cfe6637 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.280966] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Waiting for the task: (returnval){ [ 690.280966] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d20a68-cf91-3238-fccf-4c5dddb31f04" [ 690.280966] env[67008]: _type = "Task" [ 690.280966] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.289061] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d20a68-cf91-3238-fccf-4c5dddb31f04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.790656] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 690.790908] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.791135] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 691.910486] env[67008]: DEBUG nova.compute.manager [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Received event network-changed-8c09c9a9-7b9e-4afa-8782-418771dbb105 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 691.910734] env[67008]: DEBUG nova.compute.manager [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Refreshing instance network info cache due to event network-changed-8c09c9a9-7b9e-4afa-8782-418771dbb105. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 691.910969] env[67008]: DEBUG oslo_concurrency.lockutils [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] Acquiring lock "refresh_cache-f5fce891-3c35-415e-9d09-c5c8dca3dde3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 691.911043] env[67008]: DEBUG oslo_concurrency.lockutils [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] Acquired lock "refresh_cache-f5fce891-3c35-415e-9d09-c5c8dca3dde3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 691.911398] env[67008]: DEBUG nova.network.neutron [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Refreshing network info cache for port 8c09c9a9-7b9e-4afa-8782-418771dbb105 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 692.555159] env[67008]: DEBUG nova.network.neutron [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Updated VIF entry in instance network info cache for port 8c09c9a9-7b9e-4afa-8782-418771dbb105. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 692.555554] env[67008]: DEBUG nova.network.neutron [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Updating instance_info_cache with network_info: [{"id": "8c09c9a9-7b9e-4afa-8782-418771dbb105", "address": "fa:16:3e:56:7a:81", "network": {"id": "565a2ef1-2100-4970-9cfe-4ba2a89e7ce7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1320295138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6ecfed59a9d441de898ef80697e3c3b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "91712705-510f-41a0-a803-2ecd92b676e1", "external-id": "nsx-vlan-transportzone-512", "segmentation_id": 512, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c09c9a9-7b", "ovs_interfaceid": "8c09c9a9-7b9e-4afa-8782-418771dbb105", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.564291] env[67008]: DEBUG oslo_concurrency.lockutils [req-4e1ae98b-f44a-4adf-96ae-719e47d1bc39 req-a2b97551-8e7b-4c1c-a8e3-242ed284e868 service nova] Releasing lock "refresh_cache-f5fce891-3c35-415e-9d09-c5c8dca3dde3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 711.857171] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.857468] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.857597] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.869430] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 711.869655] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 711.869857] env[67008]: DEBUG 
oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 711.870030] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 711.871382] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc68ca35-6ef9-462c-a3b1-b8fc98843567 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.880017] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07235f55-9a76-4c4e-acc3-e8f5f8edb00d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.894148] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80faf9af-0e0c-4fce-b163-bab82232e5f5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.900888] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd01653-76d2-4bec-975f-53aa1044dddf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.930724] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181079MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 711.930871] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 711.931074] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 712.020188] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5e1de51-40a9-4b43-b885-87501738dc96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.020947] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cc257ece-4b3e-45c8-a1a7-69330848ad89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022570] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022570] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022570] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5472df57-f2bc-4a90-9251-13760f932d77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022671] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022707] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022827] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.022941] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.023065] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.033834] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.045100] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.056271] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 782f3bee-42b3-4822-a28d-9eb8a6cde1ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.066237] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance adc51b71-5867-43f2-a947-62e8e733db76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.075413] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2fef1490-9ac5-4246-8017-f68e512c51dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.087863] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 70e0c15e-d968-46ba-bb97-35d6687e9834 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.098144] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 430ea81b-71f9-4074-829a-fd8a6c24098b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.108429] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83b7b19d-bc72-4ac2-992d-9dda68ff1e4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.117909] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 90f7c936-6d03-4464-8719-12ab257cb714 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.127546] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5c8de12-61ca-4bc1-b871-b84cf802e916 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.136615] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cf29c18a-c923-46c8-ab02-277c2b5ee4d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.146277] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 35b9e3ec-c3da-4805-9c0f-7f772d7a3747 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.156539] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.166697] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 04badd98-b2f0-483d-82e9-5806dbf8edb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.176501] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72df790c-0cd5-4054-9162-9f9bd3d19239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.186174] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cb387fbf-affd-4d9f-a4ef-7eef58847130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.196101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 30fbf80d-2893-49cb-b4e8-456d08ce4e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.205832] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2308fd77-6032-4e59-9074-1c18e9b02d87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.215345] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 80863f1b-9c19-4fcc-8692-6015d623e011 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.224283] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 411f1932-4397-4aaa-ab21-55ff90342fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.233676] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.233901] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 712.234082] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 712.567131] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40e7a81-b61d-40ef-a12c-c4c110283bf0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.574887] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d936d8fe-f0cc-4b47-a5e4-f92e98448d6e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.605986] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc23b1-4442-4528-9369-0eef0e19b692 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.611990] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8ac5db-b8d4-4918-94c0-e1b67a8f1b75 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.624897] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.637504] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 712.653406] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 712.653594] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.723s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 713.653051] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 713.856444] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 713.856709] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 713.856860] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 714.851734] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 714.856754] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 714.856754] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 714.856754] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 714.875874] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.876102] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.876278] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.876447] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.876609] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.876792] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.876972] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.877151] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.877308] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.877496] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 714.877633] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
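
The _heal_instance_info_cache pass above rebuilds its candidate list and passes over every instance still in the Building state, since the in-flight build owns the network info cache; with all ten candidates skipped it reports that nothing was left to refresh. A hedged sketch of that selection loop (field names are simplified, not Nova's actual object model):

BUILDING = "building"

def next_instance_to_heal(instances):
    """Pick the first instance eligible for a network info cache refresh."""
    for inst in instances:
        if inst["vm_state"] == BUILDING:
            # Matches the "Skipping network cache update ... because it is
            # Building." lines above.
            continue
        return inst
    # Matches "Didn't find any instances for network info cache update."
    return None

print(next_instance_to_heal([
    {"uuid": "e5e1de51-40a9-4b43-b885-87501738dc96", "vm_state": BUILDING},
]))  # -> None
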
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 715.856562] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 734.439942] env[67008]: WARNING oslo_vmware.rw_handles [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 734.439942] env[67008]: ERROR oslo_vmware.rw_handles [ 734.440517] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 734.442214] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 734.442488] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Copying Virtual Disk [datastore1] vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/4bf8cfac-97ef-4a8c-a290-0214f7062782/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 734.442805] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-e3c29ef2-03af-4459-ae26-f7b73f3d2ad2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.450633] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Waiting for the task: (returnval){ [ 734.450633] env[67008]: value = "task-2824874" [ 734.450633] env[67008]: _type = "Task" [ 734.450633] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.458579] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Task: {'id': task-2824874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.961539] env[67008]: DEBUG oslo_vmware.exceptions [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 734.961829] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 734.962385] env[67008]: ERROR nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 734.962385] env[67008]: Faults: ['InvalidArgument'] [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Traceback (most recent call last): [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] yield resources [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self.driver.spawn(context, instance, image_meta, [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self._fetch_image_if_missing(context, vi) [ 734.962385] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] image_cache(vi, tmp_image_ds_loc) [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] vm_util.copy_virtual_disk( [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] session._wait_for_task(vmdk_copy_task) [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] return self.wait_for_task(task_ref) [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] return evt.wait() [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] result = hub.switch() [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 734.962718] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] return self.greenlet.switch() [ 734.963079] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 734.963079] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self.f(*self.args, **self.kw) [ 734.963079] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 734.963079] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] raise exceptions.translate_fault(task_info.error) [ 734.963079] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 734.963079] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Faults: ['InvalidArgument'] [ 734.963079] env[67008]: ERROR 
nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] [ 734.963079] env[67008]: INFO nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Terminating instance [ 734.964270] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 734.964467] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.965102] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 734.965287] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 734.965507] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29cf3186-0458-4069-94f5-51a04662f12a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.968088] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544f3f79-839b-4b21-b145-32f7b8c9cd71 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.974956] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 734.975181] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea0d43fd-6267-4f71-9db7-9befaf6e8722 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.977377] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.977551] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d 
tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 734.978487] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-555a4301-5dc5-423e-80a9-433d01176c72 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.982931] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Waiting for the task: (returnval){ [ 734.982931] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52587719-41b6-8ad0-c979-2f0d88f7f229" [ 734.982931] env[67008]: _type = "Task" [ 734.982931] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.990147] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52587719-41b6-8ad0-c979-2f0d88f7f229, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.063524] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 735.063766] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 735.063948] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Deleting the datastore file [datastore1] cc257ece-4b3e-45c8-a1a7-69330848ad89 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.064236] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-803740b4-f415-478b-bbf6-d02854eb0d3a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.071106] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Waiting for the task: (returnval){ [ 735.071106] env[67008]: value = "task-2824876" [ 735.071106] env[67008]: _type = "Task" [ 735.071106] env[67008]: } to complete. 
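
Every "Waiting for the task ... to complete" / "progress is 0%" pair in this section follows the same oslo.vmware pattern: wait_for_task polls the task's info until it reaches a terminal state, and a server-side fault is translated into a typed exception — here CopyVirtualDisk_Task failed and surfaced as VimFaultException with InvalidArgument on fileType. A generic polling sketch, not oslo.vmware's actual implementation; the task-info shape is an assumption:

import time

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, msg, faults):
        super().__init__(msg)
        self.faults = faults

def wait_for_task(poll_info, interval=0.5):
    """Poll until the task reaches a terminal state.

    poll_info() is assumed to return a dict like
    {"state": "running"|"success"|"error", "progress": int,
     "error": str, "faults": list}.
    """
    while True:
        info = poll_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            # Mirrors exceptions.translate_fault() raising inside _poll_task.
            raise VimFaultException(info["error"], info.get("faults", []))
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(interval)
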
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.080018] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Task: {'id': task-2824876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.497029] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 735.497329] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Creating directory with path [datastore1] vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.497452] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-614f3360-a227-47b5-a0b6-f5307c22fc9a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.509415] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Created directory with path [datastore1] vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.509608] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Fetch image to [datastore1] vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 735.509779] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 735.510523] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98571622-8816-4d9c-9e9f-a532dbcc6c8b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.518075] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f57fe8a-8418-457d-bfab-e8a0ee49276e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.526692] env[67008]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f4f68e-2bc2-494d-889d-707b1e31d793 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.557068] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c22849-b779-49e6-9f38-4f399c3fedde {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.561987] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4a74c3e3-359c-4ac1-947e-29800bd03f3d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.579873] env[67008]: DEBUG oslo_vmware.api [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Task: {'id': task-2824876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066345} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.580172] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.580357] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 735.580608] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 735.580740] env[67008]: INFO nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Took 0.62 seconds to destroy the instance on the hypervisor. 
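
Interleaved with the second image fetch, the failed instance's teardown completes in the order the vmops code logs it: unregister the VM, delete its datastore directory, then declare the instance destroyed. A compressed sketch of that ordering; the two callables are hypothetical stand-ins for the VirtualMachine.UnregisterVM and FileManager.DeleteDatastoreFile_Task invocations:

import time

def destroy_on_hypervisor(unregister_vm, delete_datastore_dir,
                          instance_uuid, datastore="datastore1"):
    start = time.monotonic()
    unregister_vm(instance_uuid)                            # UnregisterVM
    delete_datastore_dir(f"[{datastore}] {instance_uuid}")  # DeleteDatastoreFile_Task
    print(f"Took {time.monotonic() - start:.2f} seconds "
          "to destroy the instance on the hypervisor.")

destroy_on_hypervisor(lambda u: None, lambda p: None,
                      "cc257ece-4b3e-45c8-a1a7-69330848ad89")
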
[ 735.582873] env[67008]: DEBUG nova.compute.claims [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 735.583080] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 735.583360] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 735.593649] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 735.647991] env[67008]: DEBUG oslo_vmware.rw_handles [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 735.706782] env[67008]: DEBUG oslo_vmware.rw_handles [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 735.706955] env[67008]: DEBUG oslo_vmware.rw_handles [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
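
The write handle logged above streams the 21318656-byte image directly to the ESX host's /folder endpoint (note the dcPath and dsName query parameters in the URL). A minimal sketch of such an upload with http.client; the ticket cookie name and the disabled certificate verification are simplifying assumptions for a lab setup, not oslo.vmware's exact handling:

import http.client
import ssl
import urllib.parse

def write_to_datastore(url, chunks, size, ticket=None):
    """PUT an image stream to a vSphere /folder URL; return the HTTP status."""
    parts = urllib.parse.urlparse(url)
    ctx = ssl._create_unverified_context()  # lab only: skips cert verification
    conn = http.client.HTTPSConnection(parts.netloc, context=ctx)
    path = f"{parts.path}?{parts.query}" if parts.query else parts.path
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(size))
    if ticket:
        # Assumed cookie carrying the generic service ticket acquired via
        # SessionManager.AcquireGenericServiceTicket.
        conn.putheader("Cookie", f"vmware_cgi_ticket={ticket}")
    conn.endheaders()
    for chunk in chunks:
        conn.send(chunk)
    # The RemoteDisconnected traceback earlier in the log surfaced at this
    # point: the server closed the socket before sending a response.
    status = conn.getresponse().status
    conn.close()
    return status
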
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 736.025196] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9d8086-af31-4c6b-b420-3f8d3078d075 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.033035] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f70df6-6f85-4163-824e-88454c34e30d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.064470] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fca42c-4dbc-489a-ab08-4e25a6151370 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.071515] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e806dac4-f0ab-4c0d-ad01-8ef3720fb72e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.084500] env[67008]: DEBUG nova.compute.provider_tree [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.093204] env[67008]: DEBUG nova.scheduler.client.report [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 736.106422] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.523s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 736.106938] env[67008]: ERROR nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.106938] env[67008]: Faults: ['InvalidArgument'] [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Traceback (most recent call last): [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/compute/manager.py", line 
2615, in _build_and_run_instance [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self.driver.spawn(context, instance, image_meta, [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self._fetch_image_if_missing(context, vi) [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] image_cache(vi, tmp_image_ds_loc) [ 736.106938] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] vm_util.copy_virtual_disk( [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] session._wait_for_task(vmdk_copy_task) [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] return self.wait_for_task(task_ref) [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] return evt.wait() [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] result = hub.switch() [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] return self.greenlet.switch() [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 736.107288] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] self.f(*self.args, **self.kw) [ 736.107683] env[67008]: ERROR nova.compute.manager [instance: 
cc257ece-4b3e-45c8-a1a7-69330848ad89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 736.107683] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] raise exceptions.translate_fault(task_info.error) [ 736.107683] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 736.107683] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Faults: ['InvalidArgument'] [ 736.107683] env[67008]: ERROR nova.compute.manager [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] [ 736.107683] env[67008]: DEBUG nova.compute.utils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.109146] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Build of instance cc257ece-4b3e-45c8-a1a7-69330848ad89 was re-scheduled: A specified parameter was not correct: fileType [ 736.109146] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 736.109433] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 736.109611] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
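
Once the fault is recorded, _do_build_and_run_instance reschedules the build and the cleanup path first attempts driver.unplug_vifs; the VMware driver does not implement it, which produces the "not possible determine if VIFs should be unplugged" line, and the network is deallocated regardless. A control-flow sketch with hypothetical driver/network_api stand-ins:

class _VMwareLikeDriver:
    def unplug_vifs(self, instance):
        raise NotImplementedError()

def cleanup_allocated_networks(driver, network_api, context, instance):
    try:
        driver.unplug_vifs(instance)
    except NotImplementedError:
        # The VMware driver lands here: it cannot tell whether VIFs need
        # unplugging, so cleanup falls through to deallocation anyway,
        # exactly as the log shows.
        pass
    network_api.deallocate_for_instance(context, instance)
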
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 736.109771] env[67008]: DEBUG nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 736.109929] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 736.555884] env[67008]: DEBUG nova.network.neutron [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.564285] env[67008]: INFO nova.compute.manager [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] [instance: cc257ece-4b3e-45c8-a1a7-69330848ad89] Took 0.45 seconds to deallocate network for instance. [ 736.655299] env[67008]: INFO nova.scheduler.client.report [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Deleted allocations for instance cc257ece-4b3e-45c8-a1a7-69330848ad89 [ 736.686037] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e4abc2bd-d042-4172-ab97-4e5de70b8420 tempest-ImagesOneServerNegativeTestJSON-1435480157 tempest-ImagesOneServerNegativeTestJSON-1435480157-project-member] Lock "cc257ece-4b3e-45c8-a1a7-69330848ad89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 150.181s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 736.698197] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Starting instance... 
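
The lock lines above ("released ... held 150.181s", and the compute_resources claim that follows) are oslo.concurrency's lockutils reporting wait and hold times around a critical section. A small re-creation of that bookkeeping as a context manager, an approximation rather than lockutils' actual decorator:

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, caller):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    with lock:
        print(f'Lock "{name}" acquired by "{caller}" :: '
              f'waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass  # claim work happens here
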
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 736.750158] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 736.750423] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 736.751878] env[67008]: INFO nova.compute.claims [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.140312] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11aa053c-b0b1-47bc-b091-21ee5b81643a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.148448] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e167193a-e1a0-4c9c-9cbb-a15213c8f6ed {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.178571] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0df0e7d-db77-4631-aa03-df97009ec50e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.185798] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e48abd-d987-4cc6-b337-8493f78618d4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.199244] env[67008]: DEBUG nova.compute.provider_tree [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.208760] env[67008]: DEBUG nova.scheduler.client.report [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 737.222099] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d 
tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.472s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 737.222593] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 737.261273] env[67008]: DEBUG nova.compute.utils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 737.263271] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 737.263271] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 737.271353] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 737.336287] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Start spawning the instance on the hypervisor. 
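
Each "Inventory has not changed for provider ..." entry in this section repeats the same inventory dict, which fixes the node's schedulable capacity: Placement's standard arithmetic is effective = (total - reserved) * allocation_ratio, with max_unit capping any single request. A worked check against the logged numbers:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 171},
}

for rc, inv in inventory.items():
    effective = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {effective:.0f} schedulable, max {inv['max_unit']} per request")

# VCPU -> 192: 48 physical cores at a 4.0 allocation ratio, consistent with
# the "Total usable vcpus: 48, total allocated vcpus: 10" resource view above.
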
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 737.373815] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.374076] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.374239] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.374424] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.374569] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.374776] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.374985] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.375262] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.375472] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d 
tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.375697] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.375951] env[67008]: DEBUG nova.virt.hardware [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.376837] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d61de0-ebb4-48e2-8259-e19333d89ea2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.384463] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ca728d-5222-4beb-a15c-72e8aaf24b7a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.401660] env[67008]: DEBUG nova.policy [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efc7701f56d14c6fa745bb97cfa42b81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94389aa54f484a4f967de421e3d212b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 738.315136] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Successfully created port: ef056b86-ef5e-4483-b98b-413746aa1668 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.387295] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Successfully updated port: ef056b86-ef5e-4483-b98b-413746aa1668 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.409613] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "refresh_cache-8b645fe3-0a5d-4f12-a99d-1f0580432d59" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 739.409871] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquired lock 
"refresh_cache-8b645fe3-0a5d-4f12-a99d-1f0580432d59" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 739.410087] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.503208] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.861028] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Updating instance_info_cache with network_info: [{"id": "ef056b86-ef5e-4483-b98b-413746aa1668", "address": "fa:16:3e:68:fd:e0", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef056b86-ef", "ovs_interfaceid": "ef056b86-ef5e-4483-b98b-413746aa1668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.867421] env[67008]: DEBUG nova.compute.manager [req-a9211fd1-ef19-4dc3-89a9-61166d2e4e08 req-f7f4e344-eda0-40e8-aecc-66096b59537d service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Received event network-vif-plugged-ef056b86-ef5e-4483-b98b-413746aa1668 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 739.867631] env[67008]: DEBUG oslo_concurrency.lockutils [req-a9211fd1-ef19-4dc3-89a9-61166d2e4e08 req-f7f4e344-eda0-40e8-aecc-66096b59537d service nova] Acquiring lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 739.867853] env[67008]: DEBUG oslo_concurrency.lockutils [req-a9211fd1-ef19-4dc3-89a9-61166d2e4e08 req-f7f4e344-eda0-40e8-aecc-66096b59537d service nova] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 739.868035] env[67008]: DEBUG oslo_concurrency.lockutils [req-a9211fd1-ef19-4dc3-89a9-61166d2e4e08 req-f7f4e344-eda0-40e8-aecc-66096b59537d service nova] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 739.868205] env[67008]: DEBUG nova.compute.manager [req-a9211fd1-ef19-4dc3-89a9-61166d2e4e08 req-f7f4e344-eda0-40e8-aecc-66096b59537d service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] No waiting events found dispatching network-vif-plugged-ef056b86-ef5e-4483-b98b-413746aa1668 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 739.868366] env[67008]: WARNING nova.compute.manager [req-a9211fd1-ef19-4dc3-89a9-61166d2e4e08 req-f7f4e344-eda0-40e8-aecc-66096b59537d service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Received unexpected event network-vif-plugged-ef056b86-ef5e-4483-b98b-413746aa1668 for instance with vm_state building and task_state spawning. [ 739.876293] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Releasing lock "refresh_cache-8b645fe3-0a5d-4f12-a99d-1f0580432d59" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 739.876659] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance network_info: |[{"id": "ef056b86-ef5e-4483-b98b-413746aa1668", "address": "fa:16:3e:68:fd:e0", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef056b86-ef", "ovs_interfaceid": "ef056b86-ef5e-4483-b98b-413746aa1668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 739.877133] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:fd:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'ef056b86-ef5e-4483-b98b-413746aa1668', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.884411] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Creating folder: Project (94389aa54f484a4f967de421e3d212b7). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 739.885093] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0892a838-a5b9-4284-955e-4d38a09dfb62 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.896668] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Created folder: Project (94389aa54f484a4f967de421e3d212b7) in parent group-v567993. [ 739.896947] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Creating folder: Instances. Parent ref: group-v568032. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 739.897172] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b11f128e-3fc2-4fbb-8c7c-27f31d88e4e7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.905673] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Created folder: Instances in parent group-v568032. [ 739.905906] env[67008]: DEBUG oslo.service.loopingcall [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.906094] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 739.906282] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4248d09-ca78-4982-b38a-2e900f42ef69 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.926744] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.926744] env[67008]: value = "task-2824879" [ 739.926744] env[67008]: _type = "Task" [ 739.926744] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.934810] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824879, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.437327] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824879, 'name': CreateVM_Task, 'duration_secs': 0.314277} completed successfully. 
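
The CreateVM_Task block above shows oslo.vmware's task handling: wait_for_task() (api.py:397) registers the task and _poll_task (api.py:434/444) logs progress until it completes or faults. A hedged sketch of driving the same call through a session; the connection values and the config/pool arguments are placeholders, not values from this deployment:

    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder host/credentials
        api_retry_count=10, task_poll_interval=0.5)

    def create_vm_sketch(folder_ref, config_spec, pool_ref):
        # Folder.CreateVM_Task returns a Task managed-object reference;
        # wait_for_task() polls it (the "progress is 0%." lines above)
        # and returns the task info on success.
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=pool_ref)
        return session.wait_for_task(task_ref)
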
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.437571] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 740.438193] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 740.438354] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 740.438669] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 740.438930] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3a24120-5a1b-40c5-99a1-3acadb7f8839 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.443482] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Waiting for the task: (returnval){ [ 740.443482] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e4fbd6-1407-daa9-7541-fe1427fd66dd" [ 740.443482] env[67008]: _type = "Task" [ 740.443482] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.451826] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e4fbd6-1407-daa9-7541-fe1427fd66dd, 'name': SearchDatastore_Task} progress is 0%. 
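
The surrounding "[datastore1] devstack-image-cache_base/..." lock messages show the image cache being probed under a per-image lock while SearchDatastore_Task runs. A minimal sketch of that serialization using lockutils' lock() context manager; the function and the cache-probe body are illustrative only:

    from oslo_concurrency import lockutils

    def probe_image_cache_sketch(datastore, image_id):
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name):
            # Under the lock, a HostDatastoreBrowser.SearchDatastore_Task is
            # issued to check whether the image's VMDK is already cached on
            # the datastore, as in the log records above.
            ...
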
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.958200] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 740.958200] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 740.958200] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 742.063696] env[67008]: DEBUG nova.compute.manager [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Received event network-changed-ef056b86-ef5e-4483-b98b-413746aa1668 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 742.063696] env[67008]: DEBUG nova.compute.manager [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Refreshing instance network info cache due to event network-changed-ef056b86-ef5e-4483-b98b-413746aa1668. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 742.065018] env[67008]: DEBUG oslo_concurrency.lockutils [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] Acquiring lock "refresh_cache-8b645fe3-0a5d-4f12-a99d-1f0580432d59" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 742.065018] env[67008]: DEBUG oslo_concurrency.lockutils [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] Acquired lock "refresh_cache-8b645fe3-0a5d-4f12-a99d-1f0580432d59" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 742.065018] env[67008]: DEBUG nova.network.neutron [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Refreshing network info cache for port ef056b86-ef5e-4483-b98b-413746aa1668 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 742.510470] env[67008]: DEBUG nova.network.neutron [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Updated VIF entry in instance network info cache for port ef056b86-ef5e-4483-b98b-413746aa1668. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 742.510470] env[67008]: DEBUG nova.network.neutron [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Updating instance_info_cache with network_info: [{"id": "ef056b86-ef5e-4483-b98b-413746aa1668", "address": "fa:16:3e:68:fd:e0", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef056b86-ef", "ovs_interfaceid": "ef056b86-ef5e-4483-b98b-413746aa1668", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.520227] env[67008]: DEBUG oslo_concurrency.lockutils [req-278654a5-4fb5-4246-96dc-2f62029093e2 req-e6a8e159-e49f-4636-8a3e-5437e6a7bb12 service nova] Releasing lock "refresh_cache-8b645fe3-0a5d-4f12-a99d-1f0580432d59" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 743.292303] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 743.292577] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 771.856458] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 771.856760] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 771.856859] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic 
task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 771.867940] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 771.868168] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 771.868331] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 771.868487] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 771.869579] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ae03b5-4ae7-42e5-a750-f6f54d3d3ff0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.879261] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa655f1e-19bc-4f34-bbfb-c3b2bcc73934 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.893150] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd508e36-e750-43c4-ba8a-971c86731d0e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.899321] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7cca5d-950a-40b6-90ce-b842944edfbc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.929245] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181073MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 771.929398] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 771.929584] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: 
waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 772.013085] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5e1de51-40a9-4b43-b885-87501738dc96 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013085] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013085] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013085] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5472df57-f2bc-4a90-9251-13760f932d77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013414] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013414] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013414] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013414] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013521] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.013521] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 772.024165] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.035191] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 782f3bee-42b3-4822-a28d-9eb8a6cde1ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.045172] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance adc51b71-5867-43f2-a947-62e8e733db76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.054543] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2fef1490-9ac5-4246-8017-f68e512c51dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.064382] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 70e0c15e-d968-46ba-bb97-35d6687e9834 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.075019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 430ea81b-71f9-4074-829a-fd8a6c24098b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.084544] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83b7b19d-bc72-4ac2-992d-9dda68ff1e4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.094212] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 90f7c936-6d03-4464-8719-12ab257cb714 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.104228] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e5c8de12-61ca-4bc1-b871-b84cf802e916 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.113650] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cf29c18a-c923-46c8-ab02-277c2b5ee4d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.125326] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 35b9e3ec-c3da-4805-9c0f-7f772d7a3747 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.134400] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.143953] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 04badd98-b2f0-483d-82e9-5806dbf8edb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.153358] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72df790c-0cd5-4054-9162-9f9bd3d19239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.162262] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cb387fbf-affd-4d9f-a4ef-7eef58847130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.171426] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 30fbf80d-2893-49cb-b4e8-456d08ce4e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.180447] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2308fd77-6032-4e59-9074-1c18e9b02d87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.189585] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 80863f1b-9c19-4fcc-8692-6015d623e011 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.198961] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 411f1932-4397-4aaa-ab21-55ff90342fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
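
The long run of "Skipping heal of allocation" messages above is the resource tracker walking this node's placement allocations: instances that are merely scheduled here keep the allocation the scheduler made, while actively managed instances have theirs counted into the final resource view. A simplified sketch of that per-instance decision; the state test is an approximation for illustration, not Nova's exact condition:

    def audit_allocation_sketch(instance, host):
        if instance.host != host:
            # Scheduler made an allocation but the instance has yet to start
            # on this host: leave it alone ("Skipping heal of allocation").
            return 'skip'
        # Otherwise the instance is actively managed here and its allocation,
        # e.g. {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, is retained.
        return 'keep'
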
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.208089] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.218576] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.218576] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 772.218576] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 772.594730] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08d1cee-56b0-475b-849b-bd7e4b5db1aa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.603108] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd99361-e22d-42f4-babd-efc80fc88b70 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.632247] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8e0978-e1ea-466d-9f50-78fd35dcf82a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.639315] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37bee3a-12ca-46bb-9a75-507859b80459 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.652687] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.661463] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 772.674937] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 772.675314] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.746s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 774.675651] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.675943] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 774.676151] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 774.857026] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.851705] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.872530] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 775.872705] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 775.872854] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 775.893629] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894036] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894036] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894036] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894245] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894279] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894402] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894521] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894642] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894762] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 775.894909] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
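
Each "Running periodic task ComputeManager...." line comes from oslo.service's periodic task runner invoking a decorated method on the compute manager. A minimal, self-contained sketch of declaring such a task; the spacing value is illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class ManagerSketch(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Invoked via run_periodic_tasks(); instances still in the
            # Building state are skipped, as the "Skipping network cache
            # update" lines above show.
            pass

    manager = ManagerSketch(cfg.CONF)
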
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 775.895391] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 776.895731] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.180950] env[67008]: WARNING oslo_vmware.rw_handles [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 781.180950] env[67008]: ERROR oslo_vmware.rw_handles [ 781.181550] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 781.183050] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 781.183298] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Copying Virtual Disk [datastore1] vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] 
vmware_temp/0a194e90-9e62-4c7f-b032-48a98ae27035/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 781.183572] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d8d282e-3aab-4460-866b-2ebe43d951f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.191364] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Waiting for the task: (returnval){ [ 781.191364] env[67008]: value = "task-2824880" [ 781.191364] env[67008]: _type = "Task" [ 781.191364] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.199377] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Task: {'id': task-2824880, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.701890] env[67008]: DEBUG oslo_vmware.exceptions [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 781.702217] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 781.702747] env[67008]: ERROR nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.702747] env[67008]: Faults: ['InvalidArgument'] [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Traceback (most recent call last): [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] yield resources [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self.driver.spawn(context, instance, image_meta, [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 781.702747] env[67008]: ERROR 
nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self._fetch_image_if_missing(context, vi) [ 781.702747] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] image_cache(vi, tmp_image_ds_loc) [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] vm_util.copy_virtual_disk( [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] session._wait_for_task(vmdk_copy_task) [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] return self.wait_for_task(task_ref) [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] return evt.wait() [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] result = hub.switch() [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 781.703096] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] return self.greenlet.switch() [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self.f(*self.args, **self.kw) [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] raise exceptions.translate_fault(task_info.error) [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Faults: ['InvalidArgument'] [ 781.703388] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] [ 781.703388] env[67008]: INFO nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Terminating instance [ 781.704616] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 781.705076] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.705250] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7246e2c-0516-4359-8229-264218f8b897 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.707527] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 781.707806] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 781.708539] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7355e5-6560-423d-a2cb-aa39d52616e7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.715669] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 781.715935] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-deafa486-6e1d-4fa9-bf8e-40425a86b203 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.718134] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.718349] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 781.719322] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2d5c843-1d89-482c-8126-270f2a64cf94 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.724089] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Waiting for the task: (returnval){ [ 781.724089] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]527d69d5-c4b6-9dca-2cb9-b0acb1f75dc5" [ 781.724089] env[67008]: _type = "Task" [ 781.724089] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.731022] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]527d69d5-c4b6-9dca-2cb9-b0acb1f75dc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.788287] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 781.788502] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 781.788681] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Deleting the datastore file [datastore1] e5e1de51-40a9-4b43-b885-87501738dc96 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.788950] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea937ae1-9c9a-40e2-8189-71be7ae02bc1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.795549] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Waiting for the task: (returnval){ [ 781.795549] env[67008]: value = "task-2824882" [ 781.795549] env[67008]: _type = "Task" [ 781.795549] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.804568] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Task: {'id': task-2824882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.234425] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 782.234776] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Creating directory with path [datastore1] vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.234964] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e7ccdd4-1e90-400f-a289-013bbd348093 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.246492] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Created directory with path [datastore1] vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.246687] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Fetch image to [datastore1] vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 782.246852] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 782.247609] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc32dfb2-5fab-4e6d-84b7-6cd400e1bd5f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.255624] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5437f3b1-4878-4b8d-9082-12dea8e0b239 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.265916] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388960f5-9d6e-4ed7-b029-dd7f1749b226 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.300068] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baf3ec8-0277-4d8b-a235-ebc4adfc52ab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.307413] env[67008]: DEBUG oslo_vmware.api [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Task: {'id': task-2824882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084447} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.308876] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.309073] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 782.309248] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 782.309419] env[67008]: INFO nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Took 0.60 seconds to destroy the instance on the hypervisor. 
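The spawn failure and teardown traced above follow oslo.vmware's task pattern: a vSphere method that returns a Task object (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, CreateVM_Task) is invoked over SOAP, wait_for_task then polls it via _poll_task until it completes, and a failed task's error is translated into a VimFaultException, which is what surfaces here as "A specified parameter was not correct: fileType" with Faults: ['InvalidArgument']. A minimal sketch of that pattern, assuming a reachable vCenter: the endpoint, credentials, and datastore paths are placeholders; VMwareAPISession, wait_for_task, virtualDiskManager, and VimFaultException appear in the trace itself, while invoke_api is oslo.vmware's generic call wrapper standing in for Nova's internal _call_method.

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    # Hypothetical endpoint and credentials; real values would come from nova.conf.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # The same manager object the log invokes CopyVirtualDisk_Task against.
    disk_mgr = session.vim.service_content.virtualDiskManager
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',
        destName='[datastore1] vmware_temp/example/image.vmdk')
    try:
        # Blocks while _poll_task reports progress, as in the entries above.
        session.wait_for_task(copy_task)
    except vexc.VimFaultException as e:
        # A failed task's error is translated into a fault list, e.g.
        # ['InvalidArgument'] for the fileType complaint seen in this run.
        print(e.fault_list, e)

On that failure path the compute manager does exactly what the entries above record: it unregisters the VM, deletes its datastore directory, aborts the resource claim under the "compute_resources" lock, and reschedules the build.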
[ 782.311179] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c6770935-c9a9-402e-ae5a-e49403946091 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.313039] env[67008]: DEBUG nova.compute.claims [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 782.313217] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 782.313428] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 782.343490] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 782.401227] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 782.463141] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 782.463422] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 782.767367] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7627a50-d1b9-4f0e-822b-d6164276d1a8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.775624] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cd292a-8850-4e46-a38d-03871f580b07 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.805887] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439e2615-3024-4f3c-b480-dca9abf52ffd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.814457] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7940cd2-af4e-4a37-aa32-7437a4081453 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.827610] env[67008]: DEBUG nova.compute.provider_tree [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.836725] env[67008]: DEBUG nova.scheduler.client.report [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 782.851923] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.538s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 782.852465] env[67008]: ERROR nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.852465] env[67008]: Faults: ['InvalidArgument'] [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Traceback (most recent call last): [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 782.852465] env[67008]: 
ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self.driver.spawn(context, instance, image_meta, [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self._fetch_image_if_missing(context, vi) [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] image_cache(vi, tmp_image_ds_loc) [ 782.852465] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] vm_util.copy_virtual_disk( [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] session._wait_for_task(vmdk_copy_task) [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] return self.wait_for_task(task_ref) [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] return evt.wait() [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] result = hub.switch() [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] return self.greenlet.switch() [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 782.852820] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] self.f(*self.args, **self.kw) [ 782.853197] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 782.853197] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] raise exceptions.translate_fault(task_info.error) [ 782.853197] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 782.853197] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Faults: ['InvalidArgument'] [ 782.853197] env[67008]: ERROR nova.compute.manager [instance: e5e1de51-40a9-4b43-b885-87501738dc96] [ 782.853443] env[67008]: DEBUG nova.compute.utils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.854716] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Build of instance e5e1de51-40a9-4b43-b885-87501738dc96 was re-scheduled: A specified parameter was not correct: fileType [ 782.854716] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 782.855122] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 782.855301] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 782.855448] env[67008]: DEBUG nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 782.855609] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.374662] env[67008]: DEBUG nova.network.neutron [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.388168] env[67008]: INFO nova.compute.manager [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] [instance: e5e1de51-40a9-4b43-b885-87501738dc96] Took 0.53 seconds to deallocate network for instance. [ 783.504688] env[67008]: INFO nova.scheduler.client.report [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Deleted allocations for instance e5e1de51-40a9-4b43-b885-87501738dc96 [ 783.524223] env[67008]: DEBUG oslo_concurrency.lockutils [None req-281f839f-ba51-4bce-b9c3-cc3c2b0bea5d tempest-ServerExternalEventsTest-1118844566 tempest-ServerExternalEventsTest-1118844566-project-member] Lock "e5e1de51-40a9-4b43-b885-87501738dc96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.128s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 783.536345] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 783.596094] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 783.596370] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 783.598484] env[67008]: INFO nova.compute.claims [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.002628] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425fc68e-d5ec-4997-b8ce-e4a25b317594 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.010947] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63766ff4-4972-43df-9ed8-c0f32eeb89d9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.042677] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941110dc-e622-4289-9d34-f4f78365f7a4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.050917] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acef552b-6ff5-4b5c-a339-45b0fcd4ba06 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.064625] env[67008]: DEBUG nova.compute.provider_tree [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.073314] env[67008]: DEBUG nova.scheduler.client.report [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 784.091217] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.495s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 784.091682] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 784.129712] env[67008]: DEBUG nova.compute.utils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.131877] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 784.132338] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 784.143603] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 784.220073] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 784.246195] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.246195] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.246195] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.246447] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.246447] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.246447] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.246447] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.246447] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.246878] env[67008]: DEBUG 
nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.247176] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.247469] env[67008]: DEBUG nova.virt.hardware [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.248499] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2b2a6d-4e23-476f-8352-4c668d7c4499 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.253987] env[67008]: DEBUG nova.policy [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0002cdf0acb847f1919bbbd60de75252', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97b7c882f29c4d24939aba1374f0b196', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 784.261019] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5611f76-fd1a-48ed-9375-0f31898313f5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.049157] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Successfully created port: dee96108-90f2-4a39-8232-52b20360691e {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.585190] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Successfully updated port: dee96108-90f2-4a39-8232-52b20360691e {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.603620] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "refresh_cache-623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 786.603620] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquired lock "refresh_cache-623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 786.603620] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.670286] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.937928] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Updating instance_info_cache with network_info: [{"id": "dee96108-90f2-4a39-8232-52b20360691e", "address": "fa:16:3e:f9:27:96", "network": {"id": "4f98e098-0131-43c9-9fe3-d1e572ad6f79", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-798517348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b7c882f29c4d24939aba1374f0b196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdee96108-90", "ovs_interfaceid": "dee96108-90f2-4a39-8232-52b20360691e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.952595] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Releasing lock "refresh_cache-623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 786.952906] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance network_info: |[{"id": "dee96108-90f2-4a39-8232-52b20360691e", "address": "fa:16:3e:f9:27:96", "network": {"id": "4f98e098-0131-43c9-9fe3-d1e572ad6f79", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-798517348-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b7c882f29c4d24939aba1374f0b196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdee96108-90", "ovs_interfaceid": "dee96108-90f2-4a39-8232-52b20360691e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 786.953328] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:27:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dee96108-90f2-4a39-8232-52b20360691e', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.961122] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Creating folder: Project (97b7c882f29c4d24939aba1374f0b196). Parent ref: group-v567993. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 786.961677] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea7b18e3-29e2-4d04-9d30-b1fef959d829 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.969685] env[67008]: DEBUG nova.compute.manager [req-60d19caf-fd4a-4ee8-a802-2da10ca3560b req-4bf97b57-ac10-4a40-9547-83a72f18448e service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Received event network-vif-plugged-dee96108-90f2-4a39-8232-52b20360691e {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 786.969956] env[67008]: DEBUG oslo_concurrency.lockutils [req-60d19caf-fd4a-4ee8-a802-2da10ca3560b req-4bf97b57-ac10-4a40-9547-83a72f18448e service nova] Acquiring lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 786.970114] env[67008]: DEBUG oslo_concurrency.lockutils [req-60d19caf-fd4a-4ee8-a802-2da10ca3560b req-4bf97b57-ac10-4a40-9547-83a72f18448e service nova] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 786.970279] env[67008]: DEBUG oslo_concurrency.lockutils [req-60d19caf-fd4a-4ee8-a802-2da10ca3560b req-4bf97b57-ac10-4a40-9547-83a72f18448e service nova] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 786.970482] env[67008]: DEBUG nova.compute.manager [req-60d19caf-fd4a-4ee8-a802-2da10ca3560b req-4bf97b57-ac10-4a40-9547-83a72f18448e service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] No waiting events found dispatching network-vif-plugged-dee96108-90f2-4a39-8232-52b20360691e {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 786.970587] env[67008]: WARNING nova.compute.manager [req-60d19caf-fd4a-4ee8-a802-2da10ca3560b req-4bf97b57-ac10-4a40-9547-83a72f18448e service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Received unexpected event network-vif-plugged-dee96108-90f2-4a39-8232-52b20360691e for instance with vm_state building and task_state spawning. [ 786.973857] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Created folder: Project (97b7c882f29c4d24939aba1374f0b196) in parent group-v567993. [ 786.974048] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Creating folder: Instances. Parent ref: group-v568035. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 786.974277] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1338340b-734e-4b52-bb50-5a7d4b8d53ce {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.982350] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Created folder: Instances in parent group-v568035. [ 786.982568] env[67008]: DEBUG oslo.service.loopingcall [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.982743] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 786.982970] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcabbd57-bd97-4e14-bcab-2bf45a667a36 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.001076] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.001076] env[67008]: value = "task-2824885" [ 787.001076] env[67008]: _type = "Task" [ 787.001076] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.008342] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824885, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.511339] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824885, 'name': CreateVM_Task, 'duration_secs': 0.311859} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.511636] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 787.512336] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 787.512506] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 787.512816] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 787.513118] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7c351a1-0ed1-45f6-9916-55b5c0ca6122 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.518475] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Waiting for the task: (returnval){ [ 787.518475] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52223ac2-49fa-9bda-1878-b50b40a3741f" [ 787.518475] env[67008]: _type = "Task" [ 787.518475] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.526637] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52223ac2-49fa-9bda-1878-b50b40a3741f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.028570] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 788.028887] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.029064] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 788.707593] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 788.992493] env[67008]: DEBUG nova.compute.manager [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Received event network-changed-dee96108-90f2-4a39-8232-52b20360691e {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 788.992685] env[67008]: DEBUG nova.compute.manager [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Refreshing instance network info cache due to event network-changed-dee96108-90f2-4a39-8232-52b20360691e. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 788.992890] env[67008]: DEBUG oslo_concurrency.lockutils [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] Acquiring lock "refresh_cache-623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 788.993040] env[67008]: DEBUG oslo_concurrency.lockutils [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] Acquired lock "refresh_cache-623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 788.993211] env[67008]: DEBUG nova.network.neutron [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Refreshing network info cache for port dee96108-90f2-4a39-8232-52b20360691e {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 789.457826] env[67008]: DEBUG nova.network.neutron [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Updated VIF entry in instance network info cache for port dee96108-90f2-4a39-8232-52b20360691e. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 789.458244] env[67008]: DEBUG nova.network.neutron [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Updating instance_info_cache with network_info: [{"id": "dee96108-90f2-4a39-8232-52b20360691e", "address": "fa:16:3e:f9:27:96", "network": {"id": "4f98e098-0131-43c9-9fe3-d1e572ad6f79", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-798517348-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b7c882f29c4d24939aba1374f0b196", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdee96108-90", "ovs_interfaceid": "dee96108-90f2-4a39-8232-52b20360691e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.467660] env[67008]: DEBUG oslo_concurrency.lockutils [req-d01ad862-8f96-47e7-8cee-4eaac1e89c6e req-b66fcaf9-d775-4ee0-b222-c8dbcdfee717 service nova] Releasing lock "refresh_cache-623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 792.376049] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 
tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 798.659768] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 799.936257] env[67008]: DEBUG oslo_concurrency.lockutils [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "72dc7fb5-e94e-4784-9864-a1731ea7c755" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 800.285331] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 800.771736] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "40a26f4e-0be9-4770-83a7-31c87dbf921f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 807.743044] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 808.351828] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 808.878465] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 815.601616] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 815.601955] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 821.060634] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1a5e6332-6649-4613-a43b-38d8d0a8978c tempest-ServerActionsTestOtherA-2029932528 tempest-ServerActionsTestOtherA-2029932528-project-member] Acquiring lock "50af6f13-9a91-45d9-94db-4e4e84c186a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 821.060999] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1a5e6332-6649-4613-a43b-38d8d0a8978c tempest-ServerActionsTestOtherA-2029932528 tempest-ServerActionsTestOtherA-2029932528-project-member] Lock "50af6f13-9a91-45d9-94db-4e4e84c186a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 824.824505] env[67008]: DEBUG oslo_concurrency.lockutils [None req-08967cdb-bf5f-461b-a00d-6fe168e2b6c3 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] Acquiring lock "58b2f966-3574-4318-bb5c-7f9b018ab763" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 824.826399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-08967cdb-bf5f-461b-a00d-6fe168e2b6c3 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] Lock "58b2f966-3574-4318-bb5c-7f9b018ab763" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 831.215014] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ce8fbe7e-44e2-497b-a689-1a6d4871d359 tempest-ServerDiagnosticsNegativeTest-921604856 tempest-ServerDiagnosticsNegativeTest-921604856-project-member] Acquiring lock "85627a1c-95e2-4959-81cc-6e25c8c8553d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 831.215303] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ce8fbe7e-44e2-497b-a689-1a6d4871d359 tempest-ServerDiagnosticsNegativeTest-921604856 tempest-ServerDiagnosticsNegativeTest-921604856-project-member] Lock "85627a1c-95e2-4959-81cc-6e25c8c8553d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 831.858858] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.858858] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.877216] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 831.877431] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 831.877594] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 831.877744] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 831.882276] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91b28411-6b36-4214-aa2c-5aab71a072e0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.895203] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d858b94-c855-4110-bf9c-c22fc7d9fc72 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.914931] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfac6a2b-515c-4fa2-852b-62fd277b8d83 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.922736] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4e078e-bd50-465e-a33c-a06fd05c0155 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.961617] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181045MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 831.961617] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 831.961617] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 832.082059] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082347] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082527] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5472df57-f2bc-4a90-9251-13760f932d77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082527] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082671] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082757] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082874] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.082991] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.083120] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.083238] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.097414] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 35b9e3ec-c3da-4805-9c0f-7f772d7a3747 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.115740] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.130830] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 04badd98-b2f0-483d-82e9-5806dbf8edb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.147949] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72df790c-0cd5-4054-9162-9f9bd3d19239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.163716] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance cb387fbf-affd-4d9f-a4ef-7eef58847130 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.189937] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 30fbf80d-2893-49cb-b4e8-456d08ce4e3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.203940] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2308fd77-6032-4e59-9074-1c18e9b02d87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.218118] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 80863f1b-9c19-4fcc-8692-6015d623e011 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.236065] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 411f1932-4397-4aaa-ab21-55ff90342fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.248851] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.267435] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.284420] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.303582] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 50af6f13-9a91-45d9-94db-4e4e84c186a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.324215] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 58b2f966-3574-4318-bb5c-7f9b018ab763 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.343622] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 85627a1c-95e2-4959-81cc-6e25c8c8553d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.344022] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 832.344220] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 832.462921] env[67008]: WARNING oslo_vmware.rw_handles [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 832.462921] env[67008]: ERROR oslo_vmware.rw_handles [ 832.463290] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 832.465546] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 832.465546] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/fecd8898-1b09-4ef4-a22c-402ed7ac9ab2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 832.466769] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-686d114d-51c7-4231-a486-a088ba8c89f9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.476184] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Waiting for the task: (returnval){ [ 832.476184] env[67008]: value = "task-2824886" [ 832.476184] env[67008]: _type = "Task" [ 832.476184] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.484924] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Task: {'id': task-2824886, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.795388] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78738765-b569-4e4b-9174-14311634e7de {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.803293] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c25686e-5525-4d24-8aa3-35392fc065d2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.839395] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37f613b-840b-4c1a-8670-950245d4accf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.847367] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f1ac2c-c305-422d-8439-0503136cb2f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.863040] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.877338] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 832.899470] env[67008]: DEBUG 
nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 832.899674] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.938s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 832.899894] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.900045] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 832.918112] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] There are 0 instances to clean {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 832.918510] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.918510] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances with incomplete migration {{(pid=67008) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 832.929615] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.988134] env[67008]: DEBUG oslo_vmware.exceptions [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 832.988617] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 832.992029] env[67008]: ERROR nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 832.992029] env[67008]: Faults: ['InvalidArgument'] [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Traceback (most recent call last): [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] yield resources [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self.driver.spawn(context, instance, image_meta, [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self._fetch_image_if_missing(context, vi) [ 832.992029] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] image_cache(vi, tmp_image_ds_loc) [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] vm_util.copy_virtual_disk( [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] session._wait_for_task(vmdk_copy_task) [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] return self.wait_for_task(task_ref) [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] return evt.wait() [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] result = hub.switch() [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 832.992615] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] return self.greenlet.switch() [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self.f(*self.args, **self.kw) [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] raise exceptions.translate_fault(task_info.error) [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Faults: ['InvalidArgument'] [ 832.992967] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] [ 832.992967] env[67008]: INFO nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Terminating instance [ 832.992967] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 832.993666] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.993666] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a 
tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 832.993666] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 832.993774] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-556817a4-d5a0-459d-9db3-80d58a405d50 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.997664] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de6bd90-9154-438c-97af-fe76915a7c86 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.004577] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 833.004577] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d66c16f-7c00-419c-a453-d4922a56de7c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.006829] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.007054] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 833.008125] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8e32726-db9a-465e-8e59-302ec0accee4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.013195] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Waiting for the task: (returnval){ [ 833.013195] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5274bc84-03c5-94bb-40c3-90362f7f6a57" [ 833.013195] env[67008]: _type = "Task" [ 833.013195] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.021372] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5274bc84-03c5-94bb-40c3-90362f7f6a57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.081692] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 833.081901] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 833.082087] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Deleting the datastore file [datastore1] 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.082339] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d697785d-a8c4-43f5-bc22-8d1c701be11f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.088597] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Waiting for the task: (returnval){ [ 833.088597] env[67008]: value = "task-2824888" [ 833.088597] env[67008]: _type = "Task" [ 833.088597] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.096751] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Task: {'id': task-2824888, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.524515] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 833.524822] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Creating directory with path [datastore1] vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.525071] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47868edf-2d2c-481d-a7cf-33d34d082b07 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.537112] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Created directory with path [datastore1] vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.537322] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Fetch image to [datastore1] vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 833.537529] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 833.538316] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677e7513-fb67-46bb-9f13-b968306f6758 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.546897] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c770aad8-5fae-4137-89b1-86cb45f710f5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.559182] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d52662b-5c46-497d-801f-8ca20cfe1250 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.604890] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8ff881c-dd92-4d69-aee5-da5650735a29 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.614081] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0898773b-8231-4cb7-b6b1-bc3926320087 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.616370] env[67008]: DEBUG oslo_vmware.api [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Task: {'id': task-2824888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087197} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.617437] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.617437] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 833.617437] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 833.617437] env[67008]: INFO nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 833.619426] env[67008]: DEBUG nova.compute.claims [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 833.619600] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 833.620513] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 833.639265] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 833.702370] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 833.766896] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 833.767109] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 833.937623] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.938453] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.938453] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 834.182168] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 834.182445] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 834.191377] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11ae6dd-76a0-49c8-95c0-0be4ca6fc7c5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.199493] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162a976a-4902-4100-a1ad-4159912b8f15 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.232668] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f863931d-5cf1-4988-a3a5-765d498fa83d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.240442] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e7dc5f-01b7-41e3-a4c1-157584edbda1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.255604] env[67008]: DEBUG nova.compute.provider_tree [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.265437] env[67008]: DEBUG nova.scheduler.client.report [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 
tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 834.284204] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.664s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 834.284742] env[67008]: ERROR nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 834.284742] env[67008]: Faults: ['InvalidArgument'] [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Traceback (most recent call last): [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self.driver.spawn(context, instance, image_meta, [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self._fetch_image_if_missing(context, vi) [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] image_cache(vi, tmp_image_ds_loc) [ 834.284742] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] vm_util.copy_virtual_disk( [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 
24f99c22-49e9-486a-a2d7-a02a8da3f6d3] session._wait_for_task(vmdk_copy_task) [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] return self.wait_for_task(task_ref) [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] return evt.wait() [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] result = hub.switch() [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] return self.greenlet.switch() [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 834.285671] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] self.f(*self.args, **self.kw) [ 834.286084] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 834.286084] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] raise exceptions.translate_fault(task_info.error) [ 834.286084] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 834.286084] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Faults: ['InvalidArgument'] [ 834.286084] env[67008]: ERROR nova.compute.manager [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] [ 834.286084] env[67008]: DEBUG nova.compute.utils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 834.287060] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Build of instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 was re-scheduled: A specified parameter was not correct: fileType [ 834.287060] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 834.287458] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 
tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 834.288243] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 834.288243] env[67008]: DEBUG nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 834.288243] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.005838] env[67008]: DEBUG nova.network.neutron [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.019398] env[67008]: INFO nova.compute.manager [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Took 0.73 seconds to deallocate network for instance.
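The failure above follows a single pattern: vm_util.copy_virtual_disk starts a CopyVirtualDisk task, a looping call polls it, and when the task reaches the error state the VIM fault (InvalidArgument on the fileType parameter) is translated into a VimFaultException; _do_build_and_run_instance catches it, aborts the resource claim, re-schedules the build, and tears down networking as logged. Below is a minimal sketch of that poll-and-translate loop, under the assumption of a get_task_info accessor standing in for oslo.vmware's property retrieval; the real logic lives in oslo_vmware.api.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def wait_for_task(session, task_ref, interval=0.5):
        # Poll TaskInfo until the task leaves the running states.
        while True:
            info = session.get_task_info(task_ref)  # assumed accessor
            if info.state == "success":
                return info
            if info.state == "error":
                # The equivalent of exceptions.translate_fault(task_info.error):
                # surface the VIM fault as a Python exception the caller can
                # turn into a reschedule instead of a hard failure.
                raise VimFaultException(info.error.fault_list,
                                        info.error.message)
            time.sleep(interval)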
[ 835.121445] env[67008]: DEBUG oslo_concurrency.lockutils [None req-203518ab-81bc-4d75-95eb-c17ab8e19a24 tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] Acquiring lock "a4246977-28df-49ba-b0f5-3f37930aac5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 835.121723] env[67008]: DEBUG oslo_concurrency.lockutils [None req-203518ab-81bc-4d75-95eb-c17ab8e19a24 tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] Lock "a4246977-28df-49ba-b0f5-3f37930aac5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 835.154829] env[67008]: INFO nova.scheduler.client.report [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Deleted allocations for instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 [ 835.177870] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c7b0316-aaea-417f-93dc-ca0ce5a7c99a tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 246.139s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.179057] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 46.472s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 835.179294] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Acquiring lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 835.179495] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 835.179659] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.183849] env[67008]: INFO nova.compute.manager [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Terminating instance [ 835.185719] env[67008]: DEBUG nova.compute.manager [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 835.185935] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 835.186710] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a5674e9-4b9b-4bfe-a209-42f291390ad1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.195699] env[67008]: DEBUG nova.compute.manager [None req-974091af-24dd-45c7-9716-f72a2a98ab7f tempest-VolumesAssistedSnapshotsTest-588891309 tempest-VolumesAssistedSnapshotsTest-588891309-project-member] [instance: 782f3bee-42b3-4822-a28d-9eb8a6cde1ab] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.202792] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e25057-d702-4f10-a8d1-f3112f57099b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.233787] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24f99c22-49e9-486a-a2d7-a02a8da3f6d3 could not be found. [ 835.234093] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 835.234320] env[67008]: INFO nova.compute.manager [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 835.234642] env[67008]: DEBUG oslo.service.loopingcall [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.235527] env[67008]: DEBUG nova.compute.manager [-] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 835.235625] env[67008]: DEBUG nova.network.neutron [-] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.237638] env[67008]: DEBUG nova.compute.manager [None req-974091af-24dd-45c7-9716-f72a2a98ab7f tempest-VolumesAssistedSnapshotsTest-588891309 tempest-VolumesAssistedSnapshotsTest-588891309-project-member] [instance: 782f3bee-42b3-4822-a28d-9eb8a6cde1ab] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.271269] env[67008]: DEBUG nova.network.neutron [-] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.273248] env[67008]: DEBUG oslo_concurrency.lockutils [None req-974091af-24dd-45c7-9716-f72a2a98ab7f tempest-VolumesAssistedSnapshotsTest-588891309 tempest-VolumesAssistedSnapshotsTest-588891309-project-member] Lock "782f3bee-42b3-4822-a28d-9eb8a6cde1ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.285s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.283642] env[67008]: INFO nova.compute.manager [-] [instance: 24f99c22-49e9-486a-a2d7-a02a8da3f6d3] Took 0.05 seconds to deallocate network for instance. [ 835.289870] env[67008]: DEBUG nova.compute.manager [None req-5034493d-17d0-4d29-9d2e-64cadd0f7655 tempest-ImagesNegativeTestJSON-1485666449 tempest-ImagesNegativeTestJSON-1485666449-project-member] [instance: adc51b71-5867-43f2-a947-62e8e733db76] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.315407] env[67008]: DEBUG nova.compute.manager [None req-5034493d-17d0-4d29-9d2e-64cadd0f7655 tempest-ImagesNegativeTestJSON-1485666449 tempest-ImagesNegativeTestJSON-1485666449-project-member] [instance: adc51b71-5867-43f2-a947-62e8e733db76] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.371050] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5034493d-17d0-4d29-9d2e-64cadd0f7655 tempest-ImagesNegativeTestJSON-1485666449 tempest-ImagesNegativeTestJSON-1485666449-project-member] Lock "adc51b71-5867-43f2-a947-62e8e733db76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.695s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.384794] env[67008]: DEBUG nova.compute.manager [None req-74cb615e-3297-4bbb-ba17-228c2c907cc6 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 2fef1490-9ac5-4246-8017-f68e512c51dd] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.427906] env[67008]: DEBUG nova.compute.manager [None req-74cb615e-3297-4bbb-ba17-228c2c907cc6 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 2fef1490-9ac5-4246-8017-f68e512c51dd] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.464432] env[67008]: DEBUG oslo_concurrency.lockutils [None req-74cb615e-3297-4bbb-ba17-228c2c907cc6 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "2fef1490-9ac5-4246-8017-f68e512c51dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.364s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.481113] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b14dc85-18ee-4396-9cdf-47e1acaabb4b tempest-FloatingIPsAssociationTestJSON-467183986 tempest-FloatingIPsAssociationTestJSON-467183986-project-member] Lock "24f99c22-49e9-486a-a2d7-a02a8da3f6d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.302s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.482498] env[67008]: DEBUG nova.compute.manager [None req-1565f0ba-6ad2-42a8-b11e-a289dcb589d1 tempest-ServersWithSpecificFlavorTestJSON-1136835190 tempest-ServersWithSpecificFlavorTestJSON-1136835190-project-member] [instance: 70e0c15e-d968-46ba-bb97-35d6687e9834] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.509079] env[67008]: DEBUG nova.compute.manager [None req-1565f0ba-6ad2-42a8-b11e-a289dcb589d1 tempest-ServersWithSpecificFlavorTestJSON-1136835190 tempest-ServersWithSpecificFlavorTestJSON-1136835190-project-member] [instance: 70e0c15e-d968-46ba-bb97-35d6687e9834] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.638602] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1565f0ba-6ad2-42a8-b11e-a289dcb589d1 tempest-ServersWithSpecificFlavorTestJSON-1136835190 tempest-ServersWithSpecificFlavorTestJSON-1136835190-project-member] Lock "70e0c15e-d968-46ba-bb97-35d6687e9834" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.826s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.652325] env[67008]: DEBUG nova.compute.manager [None req-2af9868f-dfeb-44fc-a689-07f0d19bc7f0 tempest-ServersAdmin275Test-1466662230 tempest-ServersAdmin275Test-1466662230-project-member] [instance: 430ea81b-71f9-4074-829a-fd8a6c24098b] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.684720] env[67008]: DEBUG nova.compute.manager [None req-2af9868f-dfeb-44fc-a689-07f0d19bc7f0 tempest-ServersAdmin275Test-1466662230 tempest-ServersAdmin275Test-1466662230-project-member] [instance: 430ea81b-71f9-4074-829a-fd8a6c24098b] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.711546] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2af9868f-dfeb-44fc-a689-07f0d19bc7f0 tempest-ServersAdmin275Test-1466662230 tempest-ServersAdmin275Test-1466662230-project-member] Lock "430ea81b-71f9-4074-829a-fd8a6c24098b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.955s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.721282] env[67008]: DEBUG nova.compute.manager [None req-fbba38aa-bfa2-4459-ab5d-354d5f161600 tempest-ServersTestJSON-1031739700 tempest-ServersTestJSON-1031739700-project-member] [instance: 83b7b19d-bc72-4ac2-992d-9dda68ff1e4d] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.751237] env[67008]: DEBUG nova.compute.manager [None req-fbba38aa-bfa2-4459-ab5d-354d5f161600 tempest-ServersTestJSON-1031739700 tempest-ServersTestJSON-1031739700-project-member] [instance: 83b7b19d-bc72-4ac2-992d-9dda68ff1e4d] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.778828] env[67008]: DEBUG oslo_concurrency.lockutils [None req-fbba38aa-bfa2-4459-ab5d-354d5f161600 tempest-ServersTestJSON-1031739700 tempest-ServersTestJSON-1031739700-project-member] Lock "83b7b19d-bc72-4ac2-992d-9dda68ff1e4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.423s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.788837] env[67008]: DEBUG nova.compute.manager [None req-92132d17-e5fd-412e-84cc-20d0960e7d37 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] [instance: 90f7c936-6d03-4464-8719-12ab257cb714] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.817337] env[67008]: DEBUG nova.compute.manager [None req-92132d17-e5fd-412e-84cc-20d0960e7d37 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] [instance: 90f7c936-6d03-4464-8719-12ab257cb714] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.840676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-92132d17-e5fd-412e-84cc-20d0960e7d37 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] Lock "90f7c936-6d03-4464-8719-12ab257cb714" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.838s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.855020] env[67008]: DEBUG nova.compute.manager [None req-941774ce-1893-4b38-8692-be5227511fcb tempest-ServersTestBootFromVolume-343587925 tempest-ServersTestBootFromVolume-343587925-project-member] [instance: e5c8de12-61ca-4bc1-b871-b84cf802e916] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.860556] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.860713] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 835.860832] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 835.879993] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880173] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880440] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880440] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880543] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880717] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880790] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.880897] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.882080] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 835.883239] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 835.884328] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.886219] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.886219] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 835.894389] env[67008]: DEBUG nova.compute.manager [None req-941774ce-1893-4b38-8692-be5227511fcb tempest-ServersTestBootFromVolume-343587925 tempest-ServersTestBootFromVolume-343587925-project-member] [instance: e5c8de12-61ca-4bc1-b871-b84cf802e916] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.920046] env[67008]: DEBUG oslo_concurrency.lockutils [None req-941774ce-1893-4b38-8692-be5227511fcb tempest-ServersTestBootFromVolume-343587925 tempest-ServersTestBootFromVolume-343587925-project-member] Lock "e5c8de12-61ca-4bc1-b871-b84cf802e916" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.261s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 835.933135] env[67008]: DEBUG nova.compute.manager [None req-41f894d9-4a9e-47dd-b96f-0ee805883b92 tempest-ServerRescueTestJSONUnderV235-1962325397 tempest-ServerRescueTestJSONUnderV235-1962325397-project-member] [instance: cf29c18a-c923-46c8-ab02-277c2b5ee4d6] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 835.964146] env[67008]: DEBUG nova.compute.manager [None req-41f894d9-4a9e-47dd-b96f-0ee805883b92 tempest-ServerRescueTestJSONUnderV235-1962325397 tempest-ServerRescueTestJSONUnderV235-1962325397-project-member] [instance: cf29c18a-c923-46c8-ab02-277c2b5ee4d6] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 835.993176] env[67008]: DEBUG oslo_concurrency.lockutils [None req-41f894d9-4a9e-47dd-b96f-0ee805883b92 tempest-ServerRescueTestJSONUnderV235-1962325397 tempest-ServerRescueTestJSONUnderV235-1962325397-project-member] Lock "cf29c18a-c923-46c8-ab02-277c2b5ee4d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.036s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 836.004084] env[67008]: DEBUG nova.compute.manager [None req-3a3e81f2-1f50-4e35-90a0-6e01a7e1864c tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] [instance: 35b9e3ec-c3da-4805-9c0f-7f772d7a3747] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 836.033074] env[67008]: DEBUG nova.compute.manager [None req-3a3e81f2-1f50-4e35-90a0-6e01a7e1864c tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] [instance: 35b9e3ec-c3da-4805-9c0f-7f772d7a3747] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 836.055738] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3a3e81f2-1f50-4e35-90a0-6e01a7e1864c tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] Lock "35b9e3ec-c3da-4805-9c0f-7f772d7a3747" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.580s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 836.066294] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 836.135946] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 836.136236] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 836.137749] env[67008]: INFO nova.compute.claims [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.210137] env[67008]: DEBUG nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Refreshing inventories for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 836.230038] env[67008]: DEBUG nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Updating ProviderTree inventory for provider ad100a41-192a-4a03-bdd9-0a78ce856705 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 836.230038] env[67008]: DEBUG nova.compute.provider_tree [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 836.253541] env[67008]: DEBUG nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Refreshing aggregate associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, aggregates: None {{(pid=67008) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 836.287219] env[67008]: DEBUG nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Refreshing trait associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 836.684109] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adab9ede-3bfe-49ca-9d67-ccb64ec73918 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.694168] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ba4596-f5c8-4419-a5a6-8677ff0c98ce {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.725831] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01875a4-3fd2-4dfe-9a06-5b66bdfe5708 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.733805] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32aed05-6563-4b99-9fef-d35888511c8a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.750090] env[67008]: DEBUG nova.compute.provider_tree [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.760746] env[67008]: DEBUG nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 836.781716] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.645s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 836.782285] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Start building 
networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 836.824937] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d9e29e43-6988-40c2-88a3-947906217526 tempest-ServersV294TestFqdnHostnames-347460946 tempest-ServersV294TestFqdnHostnames-347460946-project-member] Acquiring lock "54b07ba9-b49e-4c00-8775-2edb47ca7b3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 836.825544] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d9e29e43-6988-40c2-88a3-947906217526 tempest-ServersV294TestFqdnHostnames-347460946 tempest-ServersV294TestFqdnHostnames-347460946-project-member] Lock "54b07ba9-b49e-4c00-8775-2edb47ca7b3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 836.827993] env[67008]: DEBUG nova.compute.utils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.829659] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 836.829827] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 836.843587] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 836.931989] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 836.969172] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.969172] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.969172] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.969357] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.969506] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.969690] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.969896] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.971979] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 836.972361] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.972456] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.972597] env[67008]: DEBUG nova.virt.hardware [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.973772] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba607b1-710a-4240-9824-cf41d16193b7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.982296] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c0d7d9-ff06-40a0-a1e8-039c6f79b883 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.999307] env[67008]: DEBUG nova.policy [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '645c897e2c0f41009463aba4ce06f047', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ffb1c5fe6bac4371ba07d4e70d138870', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 838.016206] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Successfully created port: e0953683-f723-4691-ae28-9c55b66c6a44 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.875600] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 839.446224] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Successfully updated port: e0953683-f723-4691-ae28-9c55b66c6a44 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.459354] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "refresh_cache-804b3e12-f8a6-46e7-ba00-93e0da2d23d5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 839.459514] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired lock "refresh_cache-804b3e12-f8a6-46e7-ba00-93e0da2d23d5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 839.459665] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.536580] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.932110] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Updating instance_info_cache with network_info: [{"id": "e0953683-f723-4691-ae28-9c55b66c6a44", "address": "fa:16:3e:23:fb:15", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0953683-f7", "ovs_interfaceid": "e0953683-f723-4691-ae28-9c55b66c6a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.950035] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Releasing lock "refresh_cache-804b3e12-f8a6-46e7-ba00-93e0da2d23d5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 839.951013] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 
tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance network_info: |[{"id": "e0953683-f723-4691-ae28-9c55b66c6a44", "address": "fa:16:3e:23:fb:15", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0953683-f7", "ovs_interfaceid": "e0953683-f723-4691-ae28-9c55b66c6a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 839.951127] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:fb:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0953683-f723-4691-ae28-9c55b66c6a44', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.959794] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating folder: Project (ffb1c5fe6bac4371ba07d4e70d138870). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 839.961360] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acc70352-df01-4845-9736-453f9451468d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.975078] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Created folder: Project (ffb1c5fe6bac4371ba07d4e70d138870) in parent group-v567993. [ 839.975233] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating folder: Instances. Parent ref: group-v568038. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 839.975521] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e658148a-e710-46ab-9f04-58787a6e2116 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.981342] env[67008]: DEBUG nova.compute.manager [req-9fde725f-823e-476c-bfb9-6c61b03291b4 req-9fd80e19-5a15-428a-82bd-87ed9edb1306 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Received event network-vif-plugged-e0953683-f723-4691-ae28-9c55b66c6a44 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 839.981549] env[67008]: DEBUG oslo_concurrency.lockutils [req-9fde725f-823e-476c-bfb9-6c61b03291b4 req-9fd80e19-5a15-428a-82bd-87ed9edb1306 service nova] Acquiring lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 839.981766] env[67008]: DEBUG oslo_concurrency.lockutils [req-9fde725f-823e-476c-bfb9-6c61b03291b4 req-9fd80e19-5a15-428a-82bd-87ed9edb1306 service nova] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 839.981929] env[67008]: DEBUG oslo_concurrency.lockutils [req-9fde725f-823e-476c-bfb9-6c61b03291b4 req-9fd80e19-5a15-428a-82bd-87ed9edb1306 service nova] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 839.982114] env[67008]: DEBUG nova.compute.manager [req-9fde725f-823e-476c-bfb9-6c61b03291b4 req-9fd80e19-5a15-428a-82bd-87ed9edb1306 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] No waiting events found dispatching network-vif-plugged-e0953683-f723-4691-ae28-9c55b66c6a44 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 839.982281] env[67008]: WARNING nova.compute.manager [req-9fde725f-823e-476c-bfb9-6c61b03291b4 req-9fd80e19-5a15-428a-82bd-87ed9edb1306 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Received unexpected event network-vif-plugged-e0953683-f723-4691-ae28-9c55b66c6a44 for instance with vm_state building and task_state spawning. [ 839.989792] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Created folder: Instances in parent group-v568038. [ 839.990034] env[67008]: DEBUG oslo.service.loopingcall [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.990814] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 839.991426] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a5badc4-e841-4744-90ac-b4c5ab3048c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.015135] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.015135] env[67008]: value = "task-2824891" [ 840.015135] env[67008]: _type = "Task" [ 840.015135] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.023935] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824891, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.210916] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 840.525699] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824891, 'name': CreateVM_Task, 'duration_secs': 0.321238} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.526018] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 840.526616] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 840.526803] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 840.527132] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 840.527390] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e5d24ae-b04e-4094-bb40-7945655df847 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.532033] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 840.532033] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]527ba47d-ac6f-efdc-642e-1910338e083a" [ 840.532033] env[67008]: _type = "Task" [ 840.532033] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.540131] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]527ba47d-ac6f-efdc-642e-1910338e083a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.047032] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 841.047311] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.047527] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 841.841402] env[67008]: DEBUG oslo_concurrency.lockutils [None req-961eba52-9ffc-4c7f-a06e-531b9f0b565a tempest-FloatingIPsAssociationNegativeTestJSON-1239678447 tempest-FloatingIPsAssociationNegativeTestJSON-1239678447-project-member] Acquiring lock "73213b19-77b2-46c0-b776-c50357e1bd07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 841.841764] env[67008]: DEBUG oslo_concurrency.lockutils [None req-961eba52-9ffc-4c7f-a06e-531b9f0b565a tempest-FloatingIPsAssociationNegativeTestJSON-1239678447 tempest-FloatingIPsAssociationNegativeTestJSON-1239678447-project-member] Lock "73213b19-77b2-46c0-b776-c50357e1bd07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 842.943199] env[67008]: DEBUG nova.compute.manager [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Received 
event network-changed-e0953683-f723-4691-ae28-9c55b66c6a44 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 842.943480] env[67008]: DEBUG nova.compute.manager [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Refreshing instance network info cache due to event network-changed-e0953683-f723-4691-ae28-9c55b66c6a44. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 842.944057] env[67008]: DEBUG oslo_concurrency.lockutils [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] Acquiring lock "refresh_cache-804b3e12-f8a6-46e7-ba00-93e0da2d23d5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 842.944057] env[67008]: DEBUG oslo_concurrency.lockutils [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] Acquired lock "refresh_cache-804b3e12-f8a6-46e7-ba00-93e0da2d23d5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 842.944057] env[67008]: DEBUG nova.network.neutron [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Refreshing network info cache for port e0953683-f723-4691-ae28-9c55b66c6a44 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 843.136411] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8ace3357-84e5-4bd1-b072-808d26d8929c tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Acquiring lock "82726788-853e-4a03-b16a-2aa0764b9e61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 843.136651] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8ace3357-84e5-4bd1-b072-808d26d8929c tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "82726788-853e-4a03-b16a-2aa0764b9e61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 843.439788] env[67008]: DEBUG nova.network.neutron [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Updated VIF entry in instance network info cache for port e0953683-f723-4691-ae28-9c55b66c6a44. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 843.440155] env[67008]: DEBUG nova.network.neutron [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Updating instance_info_cache with network_info: [{"id": "e0953683-f723-4691-ae28-9c55b66c6a44", "address": "fa:16:3e:23:fb:15", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.49", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0953683-f7", "ovs_interfaceid": "e0953683-f723-4691-ae28-9c55b66c6a44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.450509] env[67008]: DEBUG oslo_concurrency.lockutils [req-128ff9ff-76b6-48a7-8bbc-b06ce7638768 req-01e033bf-7fe0-4903-9338-329a12b35833 service nova] Releasing lock "refresh_cache-804b3e12-f8a6-46e7-ba00-93e0da2d23d5" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 848.219393] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c11a09d7-9d11-4691-9695-be3ebf2a788e tempest-ServerMetadataTestJSON-1076877760 tempest-ServerMetadataTestJSON-1076877760-project-member] Acquiring lock "bdbed593-d3f9-4ee2-af6c-3354c144ed2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 848.219675] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c11a09d7-9d11-4691-9695-be3ebf2a788e tempest-ServerMetadataTestJSON-1076877760 tempest-ServerMetadataTestJSON-1076877760-project-member] Lock "bdbed593-d3f9-4ee2-af6c-3354c144ed2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 849.190681] env[67008]: DEBUG oslo_concurrency.lockutils [None req-067280c1-b15d-4d71-8b3b-658c658bb994 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "c13f0c18-614d-4319-9422-a730eecc0820" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 849.191179] env[67008]: DEBUG oslo_concurrency.lockutils [None req-067280c1-b15d-4d71-8b3b-658c658bb994 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "c13f0c18-614d-4319-9422-a730eecc0820" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 850.161545] env[67008]: DEBUG oslo_concurrency.lockutils [None req-194947b6-21d9-4c7c-b6f0-963b4928d8d2 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] Acquiring lock "b1fd403e-ae51-4a2d-a333-e988ce0c0607" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 850.161807] env[67008]: DEBUG oslo_concurrency.lockutils [None req-194947b6-21d9-4c7c-b6f0-963b4928d8d2 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] Lock "b1fd403e-ae51-4a2d-a333-e988ce0c0607" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 850.733485] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ffeddb0-15c2-48b4-8852-39188185b9c6 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] Acquiring lock "411e08da-5ac7-429a-86b6-942b65d8e28b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 850.733898] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ffeddb0-15c2-48b4-8852-39188185b9c6 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] Lock "411e08da-5ac7-429a-86b6-942b65d8e28b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 852.001753] env[67008]: DEBUG oslo_concurrency.lockutils [None req-94b9cb81-cab0-4e2e-9192-39e922615e43 tempest-ServerAddressesNegativeTestJSON-968289511 tempest-ServerAddressesNegativeTestJSON-968289511-project-member] Acquiring lock "14709db2-f22d-4de3-84f6-be27329c4cc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 852.002871] env[67008]: DEBUG oslo_concurrency.lockutils [None req-94b9cb81-cab0-4e2e-9192-39e922615e43 tempest-ServerAddressesNegativeTestJSON-968289511 tempest-ServerAddressesNegativeTestJSON-968289511-project-member] Lock "14709db2-f22d-4de3-84f6-be27329c4cc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 857.066617] env[67008]: DEBUG oslo_concurrency.lockutils [None req-fd2a1ad3-03f5-4840-9402-b7c056d4c41d tempest-InstanceActionsTestJSON-1445961136 tempest-InstanceActionsTestJSON-1445961136-project-member] Acquiring lock "17bcf1ed-e2b9-402d-a4bb-5d76e7af984f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 857.066942] env[67008]: DEBUG oslo_concurrency.lockutils [None req-fd2a1ad3-03f5-4840-9402-b7c056d4c41d tempest-InstanceActionsTestJSON-1445961136 tempest-InstanceActionsTestJSON-1445961136-project-member] Lock "17bcf1ed-e2b9-402d-a4bb-5d76e7af984f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 873.664319] env[67008]: DEBUG oslo_concurrency.lockutils [None req-917ea9c3-6aa2-45ee-8135-d49f0bef13c4 tempest-ServersNegativeTestMultiTenantJSON-1561418679 tempest-ServersNegativeTestMultiTenantJSON-1561418679-project-member] Acquiring lock "094739c6-639a-4434-a263-bbc62f307918" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 873.664605] env[67008]: DEBUG oslo_concurrency.lockutils [None req-917ea9c3-6aa2-45ee-8135-d49f0bef13c4 tempest-ServersNegativeTestMultiTenantJSON-1561418679 tempest-ServersNegativeTestMultiTenantJSON-1561418679-project-member] Lock "094739c6-639a-4434-a263-bbc62f307918" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 881.213889] env[67008]: WARNING oslo_vmware.rw_handles [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 881.213889] env[67008]: ERROR oslo_vmware.rw_handles [ 881.214514] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 
{{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 881.216091] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 881.216353] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Copying Virtual Disk [datastore1] vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/f23c6f9a-022b-4534-ae98-976abd497f8b/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 881.216689] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7998d33f-13d6-4d4f-a9f6-886c9ed78cff {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.226164] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Waiting for the task: (returnval){ [ 881.226164] env[67008]: value = "task-2824892" [ 881.226164] env[67008]: _type = "Task" [ 881.226164] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.234321] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Task: {'id': task-2824892, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.736694] env[67008]: DEBUG oslo_vmware.exceptions [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 881.737106] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 881.737541] env[67008]: ERROR nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 881.737541] env[67008]: Faults: ['InvalidArgument'] [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Traceback (most recent call last): [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] yield resources [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self.driver.spawn(context, instance, image_meta, [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self._vmops.spawn(context, instance, image_meta, injected_files, [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self._fetch_image_if_missing(context, vi) [ 881.737541] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] image_cache(vi, tmp_image_ds_loc) [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] vm_util.copy_virtual_disk( [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] session._wait_for_task(vmdk_copy_task) [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] return self.wait_for_task(task_ref) [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] return evt.wait() [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] result = hub.switch() [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 881.737954] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] return self.greenlet.switch() [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self.f(*self.args, **self.kw) [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] raise exceptions.translate_fault(task_info.error) [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Faults: ['InvalidArgument'] [ 881.738377] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] [ 881.738377] env[67008]: INFO nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Terminating instance [ 881.739426] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 881.739627] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.740230] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 
tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 881.740413] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 881.740634] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2d1cbb9-fd5b-40a6-a47d-75905db4037a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.742865] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc940d37-2295-4292-b058-9e61bb8db28e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.749737] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 881.749960] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c12824af-9dc1-4525-84cb-76a29198c53a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.752093] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.752271] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 881.753205] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed29c1d5-3bdf-4ea2-bd32-ddb4d590caf8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.757753] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 881.757753] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52785baf-983c-1942-4f7b-08372269a328" [ 881.757753] env[67008]: _type = "Task" [ 881.757753] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.770138] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52785baf-983c-1942-4f7b-08372269a328, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.827904] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 881.828214] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 881.828311] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Deleting the datastore file [datastore1] 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.828566] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67701d10-adbd-4d41-a3d1-d273fec7b086 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.834249] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Waiting for the task: (returnval){ [ 881.834249] env[67008]: value = "task-2824894" [ 881.834249] env[67008]: _type = "Task" [ 881.834249] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.841793] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Task: {'id': task-2824894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.268869] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 882.269230] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating directory with path [datastore1] vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.269338] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e23d8f2-2335-45b2-a12c-998eae1b18f8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.280722] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Created directory with path [datastore1] vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.280915] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Fetch image to [datastore1] vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 882.281108] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 882.281834] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedc76a1-a83b-48e1-9865-6503dbbcde92 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.288486] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fdc82b-37cc-46f7-b69b-02ca71637f40 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.297638] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709be640-21ae-4909-94c6-beb68c00b2bc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.328813] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d3f5dc-9647-4e06-b76d-659609d0ed71 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.334173] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c86b6b01-0389-463c-b842-6ba5daa05ab5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.343515] env[67008]: DEBUG oslo_vmware.api [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Task: {'id': task-2824894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081503} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.343750] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.343922] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 882.344100] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 882.344274] env[67008]: INFO nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Took 0.60 seconds to destroy the instance on the hypervisor. 
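The create, copy, delete, and search operations traced above all follow the same oslo.vmware task-polling pattern: a vCenter task is submitted, then its task info is polled ("progress is 0%") until it reaches a terminal state, with the elapsed duration logged on success ("completed successfully ... duration_secs") and the translated fault raised on error. Below is a minimal, self-contained sketch of that loop under stated assumptions: `TaskInfo`, `get_task_info`, and the canned `_POLLS` sequence are hypothetical stand-ins so the sketch runs without a vCenter connection; the real loop lives in oslo.vmware's `api.py` (`wait_for_task` / `_poll_task`), whose name is reused here for readability only.

```python
# Minimal sketch of the task-polling pattern this log shows for CreateVM_Task,
# CopyVirtualDisk_Task, SearchDatastore_Task, and DeleteDatastoreFile_Task.
# TaskInfo and the canned _POLLS sequence are hypothetical stand-ins; the real
# implementation is oslo.vmware's api.py (wait_for_task / _poll_task).
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str            # 'running' | 'success' | 'error'
    progress: int = 0     # percent, as in "progress is 0%"
    error: str | None = None

# Canned poll results so the sketch runs without a vCenter connection.
_POLLS = iter([TaskInfo("running", 0), TaskInfo("running", 40),
               TaskInfo("success", 100)])

def get_task_info(task_ref: str) -> TaskInfo:
    """Stand-in for fetching the vCenter task's info object."""
    return next(_POLLS)

def wait_for_task(task_ref: str, interval: float = 0.1) -> TaskInfo:
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            # mirrors "... completed successfully" with its duration_secs field
            print(f"Task {task_ref} completed in {time.monotonic() - start:.6f}s")
            return info
        if info.state == "error":
            # oslo.vmware raises exceptions.translate_fault(task_info.error) here;
            # this is where the InvalidArgument fault above surfaced
            raise RuntimeError(info.error)
        print(f"Task {task_ref} progress is {info.progress}%")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task("task-2824894")
```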
[ 882.346435] env[67008]: DEBUG nova.compute.claims [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 882.346614] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 882.346837] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 882.369986] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 882.480970] env[67008]: DEBUG oslo_vmware.rw_handles [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 882.539807] env[67008]: DEBUG oslo_vmware.rw_handles [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 882.540009] env[67008]: DEBUG oslo_vmware.rw_handles [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 882.780751] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d57e203-8345-4123-a491-af92a6baa3dc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.789519] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3833e70d-f47a-4bcf-ae16-c684ddd76fbf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.819629] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b194b3-aa44-494e-a448-c4732cdc949b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.827150] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b592f7aa-5025-4f5e-beb5-769c6a9c16dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.840530] env[67008]: DEBUG nova.compute.provider_tree [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.849089] env[67008]: DEBUG nova.scheduler.client.report [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 882.866878] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.520s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 882.867511] env[67008]: ERROR nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 882.867511] env[67008]: Faults: ['InvalidArgument'] [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Traceback (most recent call last): [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self.driver.spawn(context, instance, image_meta, [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self._vmops.spawn(context, instance, image_meta, injected_files, [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self._fetch_image_if_missing(context, vi) [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] image_cache(vi, tmp_image_ds_loc) [ 882.867511] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] vm_util.copy_virtual_disk( [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] session._wait_for_task(vmdk_copy_task) [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] return self.wait_for_task(task_ref) [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] return evt.wait() [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] result = hub.switch() [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] return self.greenlet.switch() [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 882.867904] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] self.f(*self.args, **self.kw) [ 882.868296] env[67008]: ERROR nova.compute.manager [instance: 
3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 882.868296] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] raise exceptions.translate_fault(task_info.error) [ 882.868296] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 882.868296] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Faults: ['InvalidArgument'] [ 882.868296] env[67008]: ERROR nova.compute.manager [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] [ 882.868747] env[67008]: DEBUG nova.compute.utils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 882.870235] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Build of instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 was re-scheduled: A specified parameter was not correct: fileType [ 882.870235] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 882.870687] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 882.870920] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 882.871155] env[67008]: DEBUG nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 882.871378] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 883.410501] env[67008]: DEBUG nova.network.neutron [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.422147] env[67008]: INFO nova.compute.manager [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Took 0.55 seconds to deallocate network for instance. [ 883.530635] env[67008]: INFO nova.scheduler.client.report [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Deleted allocations for instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 [ 883.559403] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3873766-861f-4655-a3a0-b7b600629db0 tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 291.636s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.560587] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 91.185s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.560886] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Acquiring lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 883.561145] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock 
"3aa08a8c-5e53-4fd3-9b66-6e6367d31a50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.561345] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.563256] env[67008]: INFO nova.compute.manager [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Terminating instance [ 883.565070] env[67008]: DEBUG nova.compute.manager [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 883.565495] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 883.565749] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d97539e6-56fe-4ebc-9098-3727ccf49ecf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.575163] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcfd2be-89e1-4c43-9ae5-15ef406edd1f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.586895] env[67008]: DEBUG nova.compute.manager [None req-ecca436e-9738-4024-87e1-73f2325d675e tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: 04badd98-b2f0-483d-82e9-5806dbf8edb3] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.609502] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50 could not be found. 
[ 883.609718] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 883.609896] env[67008]: INFO nova.compute.manager [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Took 0.04 seconds to destroy the instance on the hypervisor. [ 883.610193] env[67008]: DEBUG oslo.service.loopingcall [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.610455] env[67008]: DEBUG nova.compute.manager [-] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 883.610568] env[67008]: DEBUG nova.network.neutron [-] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 883.614013] env[67008]: DEBUG nova.compute.manager [None req-ecca436e-9738-4024-87e1-73f2325d675e tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: 04badd98-b2f0-483d-82e9-5806dbf8edb3] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.637470] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ecca436e-9738-4024-87e1-73f2325d675e tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "04badd98-b2f0-483d-82e9-5806dbf8edb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.394s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.650665] env[67008]: DEBUG nova.compute.manager [None req-aeac8b4b-1157-44dd-b431-f96eba868b0c tempest-AttachInterfacesV270Test-777158603 tempest-AttachInterfacesV270Test-777158603-project-member] [instance: 72df790c-0cd5-4054-9162-9f9bd3d19239] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.677572] env[67008]: DEBUG nova.compute.manager [None req-aeac8b4b-1157-44dd-b431-f96eba868b0c tempest-AttachInterfacesV270Test-777158603 tempest-AttachInterfacesV270Test-777158603-project-member] [instance: 72df790c-0cd5-4054-9162-9f9bd3d19239] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.701797] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aeac8b4b-1157-44dd-b431-f96eba868b0c tempest-AttachInterfacesV270Test-777158603 tempest-AttachInterfacesV270Test-777158603-project-member] Lock "72df790c-0cd5-4054-9162-9f9bd3d19239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.407s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.710632] env[67008]: DEBUG nova.compute.manager [None req-48b61275-1700-43a6-9cd3-acbce1e16a8e tempest-ServerAddressesTestJSON-1658310963 tempest-ServerAddressesTestJSON-1658310963-project-member] [instance: cb387fbf-affd-4d9f-a4ef-7eef58847130] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.720852] env[67008]: DEBUG nova.network.neutron [-] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.730922] env[67008]: INFO nova.compute.manager [-] [instance: 3aa08a8c-5e53-4fd3-9b66-6e6367d31a50] Took 0.12 seconds to deallocate network for instance. [ 883.739034] env[67008]: DEBUG nova.compute.manager [None req-48b61275-1700-43a6-9cd3-acbce1e16a8e tempest-ServerAddressesTestJSON-1658310963 tempest-ServerAddressesTestJSON-1658310963-project-member] [instance: cb387fbf-affd-4d9f-a4ef-7eef58847130] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.759214] env[67008]: DEBUG oslo_concurrency.lockutils [None req-48b61275-1700-43a6-9cd3-acbce1e16a8e tempest-ServerAddressesTestJSON-1658310963 tempest-ServerAddressesTestJSON-1658310963-project-member] Lock "cb387fbf-affd-4d9f-a4ef-7eef58847130" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.182s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.774895] env[67008]: DEBUG nova.compute.manager [None req-f3ba1226-5924-4d15-a289-c6ee3ce5f4c9 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 30fbf80d-2893-49cb-b4e8-456d08ce4e3a] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.803059] env[67008]: DEBUG nova.compute.manager [None req-f3ba1226-5924-4d15-a289-c6ee3ce5f4c9 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 30fbf80d-2893-49cb-b4e8-456d08ce4e3a] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.827915] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f3ba1226-5924-4d15-a289-c6ee3ce5f4c9 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "30fbf80d-2893-49cb-b4e8-456d08ce4e3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.997s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.831600] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2fe97865-aac3-44bf-b12e-0b190226724e tempest-AttachInterfacesUnderV243Test-812491158 tempest-AttachInterfacesUnderV243Test-812491158-project-member] Lock "3aa08a8c-5e53-4fd3-9b66-6e6367d31a50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.271s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.837400] env[67008]: DEBUG nova.compute.manager [None req-56243726-f421-4a84-ae4c-a9b277a69661 tempest-ServerDiagnosticsV248Test-1232884913 tempest-ServerDiagnosticsV248Test-1232884913-project-member] [instance: 2308fd77-6032-4e59-9074-1c18e9b02d87] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.862501] env[67008]: DEBUG nova.compute.manager [None req-56243726-f421-4a84-ae4c-a9b277a69661 tempest-ServerDiagnosticsV248Test-1232884913 tempest-ServerDiagnosticsV248Test-1232884913-project-member] [instance: 2308fd77-6032-4e59-9074-1c18e9b02d87] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.882978] env[67008]: DEBUG oslo_concurrency.lockutils [None req-56243726-f421-4a84-ae4c-a9b277a69661 tempest-ServerDiagnosticsV248Test-1232884913 tempest-ServerDiagnosticsV248Test-1232884913-project-member] Lock "2308fd77-6032-4e59-9074-1c18e9b02d87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.908s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.891467] env[67008]: DEBUG nova.compute.manager [None req-b611e079-d5b8-4d6d-83af-d781328f665f tempest-ServerGroupTestJSON-1323841408 tempest-ServerGroupTestJSON-1323841408-project-member] [instance: 80863f1b-9c19-4fcc-8692-6015d623e011] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.920144] env[67008]: DEBUG nova.compute.manager [None req-b611e079-d5b8-4d6d-83af-d781328f665f tempest-ServerGroupTestJSON-1323841408 tempest-ServerGroupTestJSON-1323841408-project-member] [instance: 80863f1b-9c19-4fcc-8692-6015d623e011] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.941192] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b611e079-d5b8-4d6d-83af-d781328f665f tempest-ServerGroupTestJSON-1323841408 tempest-ServerGroupTestJSON-1323841408-project-member] Lock "80863f1b-9c19-4fcc-8692-6015d623e011" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.357s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 883.951157] env[67008]: DEBUG nova.compute.manager [None req-6af15646-b8de-4866-a2c2-2177785b727b tempest-ServerActionsTestJSON-1407308620 tempest-ServerActionsTestJSON-1407308620-project-member] [instance: 411f1932-4397-4aaa-ab21-55ff90342fbb] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 883.976140] env[67008]: DEBUG nova.compute.manager [None req-6af15646-b8de-4866-a2c2-2177785b727b tempest-ServerActionsTestJSON-1407308620 tempest-ServerActionsTestJSON-1407308620-project-member] [instance: 411f1932-4397-4aaa-ab21-55ff90342fbb] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 883.996988] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6af15646-b8de-4866-a2c2-2177785b727b tempest-ServerActionsTestJSON-1407308620 tempest-ServerActionsTestJSON-1407308620-project-member] Lock "411f1932-4397-4aaa-ab21-55ff90342fbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.146s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 884.006050] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 884.052926] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 884.053168] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 884.054621] env[67008]: INFO nova.compute.claims [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.418521] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a460dd-ae9d-4536-93d3-85f2b29a9ffa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.426354] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e543c7-c90a-445e-8cc1-a6c9d64a6498 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.458566] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cc7e83-629a-44b5-a6f9-d4a455f199ca {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.465777] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8b6fda-e1b0-43ff-917d-5216a784b4c5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.479263] env[67008]: DEBUG nova.compute.provider_tree [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.490136] env[67008]: DEBUG nova.scheduler.client.report [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 884.503727] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 
tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.450s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 884.504290] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 884.541392] env[67008]: DEBUG nova.compute.utils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.542666] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 884.542843] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 884.552116] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 884.624523] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 884.638592] env[67008]: DEBUG nova.policy [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5343dcbda10b4898b07cfc371ea9e355', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2ffa090d43c4facaec9fcb96575a5f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 884.653142] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 884.653399] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 884.653584] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.653760] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 884.653907] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.654362] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 884.654474] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 
tempest-ImagesTestJSON-1740946510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 884.654665] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 884.654880] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 884.655097] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 884.655311] env[67008]: DEBUG nova.virt.hardware [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 884.657217] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f7249a-40ff-4ecf-8185-f400ca4f3f2e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.665106] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49755bef-db14-4b55-a3a1-eaccf81a4918 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.108409] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Successfully created port: fb09fd86-f47d-490e-a554-e79f7c226bc1 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.436727] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Successfully updated port: fb09fd86-f47d-490e-a554-e79f7c226bc1 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.463151] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "refresh_cache-95604dd8-b797-440e-a844-af44609faa61" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 886.463151] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "refresh_cache-95604dd8-b797-440e-a844-af44609faa61" {{(pid=67008) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 886.463151] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 886.534451] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 886.628947] env[67008]: DEBUG nova.compute.manager [req-8def1a13-0804-4010-9d04-0d2d8f3e66f4 req-b9d1bdea-04da-435a-8031-68b467ed70c0 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Received event network-vif-plugged-fb09fd86-f47d-490e-a554-e79f7c226bc1 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 886.629434] env[67008]: DEBUG oslo_concurrency.lockutils [req-8def1a13-0804-4010-9d04-0d2d8f3e66f4 req-b9d1bdea-04da-435a-8031-68b467ed70c0 service nova] Acquiring lock "95604dd8-b797-440e-a844-af44609faa61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 886.630542] env[67008]: DEBUG oslo_concurrency.lockutils [req-8def1a13-0804-4010-9d04-0d2d8f3e66f4 req-b9d1bdea-04da-435a-8031-68b467ed70c0 service nova] Lock "95604dd8-b797-440e-a844-af44609faa61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 886.630757] env[67008]: DEBUG oslo_concurrency.lockutils [req-8def1a13-0804-4010-9d04-0d2d8f3e66f4 req-b9d1bdea-04da-435a-8031-68b467ed70c0 service nova] Lock "95604dd8-b797-440e-a844-af44609faa61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 886.630933] env[67008]: DEBUG nova.compute.manager [req-8def1a13-0804-4010-9d04-0d2d8f3e66f4 req-b9d1bdea-04da-435a-8031-68b467ed70c0 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] No waiting events found dispatching network-vif-plugged-fb09fd86-f47d-490e-a554-e79f7c226bc1 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 886.631112] env[67008]: WARNING nova.compute.manager [req-8def1a13-0804-4010-9d04-0d2d8f3e66f4 req-b9d1bdea-04da-435a-8031-68b467ed70c0 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Received unexpected event network-vif-plugged-fb09fd86-f47d-490e-a554-e79f7c226bc1 for instance with vm_state building and task_state spawning. 
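Port fb09fd86-f47d-490e-a554-e79f7c226bc1 is created and plugged while instance 95604dd8 is still building, so the network-vif-plugged external event arrives before compute has registered a waiter for it; that is what the "No waiting events found dispatching" and "Received unexpected event" lines record. A simplified sketch of that per-instance event bookkeeping follows; the class and method names are illustrative, not Nova's exact implementation.

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Per-instance event waiters, keyed by event name."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)  # uuid -> {event_name: Event}

        def prepare_for_event(self, instance_uuid, event_name):
            # Registered *before* an operation that expects the event.
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            # Called when Neutron delivers an external event.
            with self._lock:
                waiter = self._waiters[instance_uuid].pop(event_name, None)
            if waiter is None:
                # Nothing registered yet: the "Received unexpected event"
                # warning above corresponds to this branch.
                return False
            waiter.set()
            return True
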
[ 886.841536] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Updating instance_info_cache with network_info: [{"id": "fb09fd86-f47d-490e-a554-e79f7c226bc1", "address": "fa:16:3e:57:a5:d2", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb09fd86-f4", "ovs_interfaceid": "fb09fd86-f47d-490e-a554-e79f7c226bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.858827] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "refresh_cache-95604dd8-b797-440e-a844-af44609faa61" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 886.859540] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance network_info: |[{"id": "fb09fd86-f47d-490e-a554-e79f7c226bc1", "address": "fa:16:3e:57:a5:d2", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb09fd86-f4", "ovs_interfaceid": "fb09fd86-f47d-490e-a554-e79f7c226bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 886.859679] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 
tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:a5:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb09fd86-f47d-490e-a554-e79f7c226bc1', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.868044] env[67008]: DEBUG oslo.service.loopingcall [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.868266] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95604dd8-b797-440e-a844-af44609faa61] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 886.868503] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19575aaf-fae8-4a37-934a-88cc70013414 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.891339] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.891339] env[67008]: value = "task-2824895" [ 886.891339] env[67008]: _type = "Task" [ 886.891339] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.900750] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824895, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.123907] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "95604dd8-b797-440e-a844-af44609faa61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 887.401857] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824895, 'name': CreateVM_Task, 'duration_secs': 0.317646} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.401857] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95604dd8-b797-440e-a844-af44609faa61] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 887.402613] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 887.402835] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 887.403170] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 887.403332] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c23d067d-e551-4a69-bde4-ff927896271d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.409471] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 887.409471] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52810fd4-7782-85e2-592e-5803ece735bb" [ 887.409471] env[67008]: _type = "Task" [ 887.409471] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.418475] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52810fd4-7782-85e2-592e-5803ece735bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.924779] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 887.924779] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.924779] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 888.906320] env[67008]: DEBUG nova.compute.manager [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Received event network-changed-fb09fd86-f47d-490e-a554-e79f7c226bc1 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 888.906548] env[67008]: DEBUG nova.compute.manager [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Refreshing instance network info cache due to event network-changed-fb09fd86-f47d-490e-a554-e79f7c226bc1. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 888.906768] env[67008]: DEBUG oslo_concurrency.lockutils [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] Acquiring lock "refresh_cache-95604dd8-b797-440e-a844-af44609faa61" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 888.907309] env[67008]: DEBUG oslo_concurrency.lockutils [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] Acquired lock "refresh_cache-95604dd8-b797-440e-a844-af44609faa61" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 888.910690] env[67008]: DEBUG nova.network.neutron [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Refreshing network info cache for port fb09fd86-f47d-490e-a554-e79f7c226bc1 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 889.407682] env[67008]: DEBUG nova.network.neutron [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Updated VIF entry in instance network info cache for port fb09fd86-f47d-490e-a554-e79f7c226bc1. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 889.408071] env[67008]: DEBUG nova.network.neutron [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] [instance: 95604dd8-b797-440e-a844-af44609faa61] Updating instance_info_cache with network_info: [{"id": "fb09fd86-f47d-490e-a554-e79f7c226bc1", "address": "fa:16:3e:57:a5:d2", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb09fd86-f4", "ovs_interfaceid": "fb09fd86-f47d-490e-a554-e79f7c226bc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.427752] env[67008]: DEBUG oslo_concurrency.lockutils [req-316f9857-bd70-4699-8e53-6cf0875870bf req-78f4c6ce-303d-4f66-9013-31489af66224 service nova] Releasing lock "refresh_cache-95604dd8-b797-440e-a844-af44609faa61" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 891.856451] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.866934] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 891.867176] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 891.867345] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 891.867523] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 891.868699] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f78d247-397e-4abb-86bf-69ba3045b017 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.877421] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446b873f-69dd-44a4-ab0d-f973688b6b6d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.892493] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50b4e19-2e6e-4354-8fae-025a3dd52ae9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.898562] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3bd06e-8b2b-4728-8637-7ece867680a2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.928293] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181063MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 891.928442] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 891.928635] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 892.006462] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5472df57-f2bc-4a90-9251-13760f932d77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.006641] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.006782] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.006902] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.007031] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.007154] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.007269] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.007383] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.007496] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.007609] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 892.020754] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.031983] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.042463] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 50af6f13-9a91-45d9-94db-4e4e84c186a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.053028] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 58b2f966-3574-4318-bb5c-7f9b018ab763 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.063394] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 85627a1c-95e2-4959-81cc-6e25c8c8553d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.073522] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.083173] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance a4246977-28df-49ba-b0f5-3f37930aac5b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.093403] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54b07ba9-b49e-4c00-8775-2edb47ca7b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.104049] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 73213b19-77b2-46c0-b776-c50357e1bd07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.116128] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 82726788-853e-4a03-b16a-2aa0764b9e61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.126365] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bdbed593-d3f9-4ee2-af6c-3354c144ed2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.136784] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance c13f0c18-614d-4319-9422-a730eecc0820 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.146557] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b1fd403e-ae51-4a2d-a333-e988ce0c0607 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.157229] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 411e08da-5ac7-429a-86b6-942b65d8e28b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.168045] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 14709db2-f22d-4de3-84f6-be27329c4cc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.177423] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 17bcf1ed-e2b9-402d-a4bb-5d76e7af984f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.187047] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 094739c6-639a-4434-a263-bbc62f307918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.187292] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 892.187437] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 892.539957] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d904c4-a791-49bd-b323-23f3f8260706 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.547961] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13802be-ae81-433c-ac9c-b56fd77f48ea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.580752] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f6049d-90da-4cb3-9be1-7b75f313c38a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.588420] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784a8de4-b0c4-4ccc-8d73-7c01f5ec6234 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.602691] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.614169] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 892.641344] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 892.641547] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.713s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 892.652637] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 892.652859] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 893.641488] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.856383] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.856740] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.857171] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 896.857527] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 896.857527] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 896.857860] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 896.878894] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879081] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879220] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879345] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879470] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879593] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879713] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.879835] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.880033] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.880252] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 896.880475] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 896.881267] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.857226] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.857474] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.852573] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.851667] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 927.663281] env[67008]: WARNING oslo_vmware.rw_handles [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 927.663281] env[67008]: 
ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 927.663281] env[67008]: ERROR oslo_vmware.rw_handles [ 927.663939] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 927.665643] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 927.665935] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Copying Virtual Disk [datastore1] vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/d14ebfb1-f057-4252-9bc7-5fcdad7da38f/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 927.666238] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09741376-d882-4d89-aa4a-dfc22bb4edc9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.673932] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 927.673932] env[67008]: value = "task-2824896" [ 927.673932] env[67008]: _type = "Task" [ 927.673932] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.682019] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': task-2824896, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.184556] env[67008]: DEBUG oslo_vmware.exceptions [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 928.184894] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 928.185499] env[67008]: ERROR nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.185499] env[67008]: Faults: ['InvalidArgument'] [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Traceback (most recent call last): [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] yield resources [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self.driver.spawn(context, instance, image_meta, [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self._fetch_image_if_missing(context, vi) [ 928.185499] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] image_cache(vi, tmp_image_ds_loc) [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] vm_util.copy_virtual_disk( [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] session._wait_for_task(vmdk_copy_task) [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] return self.wait_for_task(task_ref) [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] return evt.wait() [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] result = hub.switch() [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 928.185933] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] return self.greenlet.switch() [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self.f(*self.args, **self.kw) [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] raise exceptions.translate_fault(task_info.error) [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Faults: ['InvalidArgument'] [ 928.186457] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] [ 928.186457] env[67008]: INFO nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Terminating instance [ 928.187379] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 928.187615] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.189628] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d102b5d6-90a1-4e14-9851-a433cda91f8c {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.190097] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 928.190367] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 928.191020] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1817a6a5-4849-44be-91c6-f7b771bdcbee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.197892] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 928.198130] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c869052d-6a52-42a4-a7e4-fd42cc2be8f5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.200978] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.200978] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 928.201622] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7af7c906-8ad2-420e-a989-4b78b8394ac2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.206242] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 928.206242] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52944638-96bc-6b81-79c7-c0814ce9ac01" [ 928.206242] env[67008]: _type = "Task" [ 928.206242] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.213676] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52944638-96bc-6b81-79c7-c0814ce9ac01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.287642] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 928.287872] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 928.288073] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Deleting the datastore file [datastore1] 5472df57-f2bc-4a90-9251-13760f932d77 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.288374] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ceaf26d1-8616-4f14-9117-510f1f73f663 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.295055] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 928.295055] env[67008]: value = "task-2824898" [ 928.295055] env[67008]: _type = "Task" [ 928.295055] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.303099] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': task-2824898, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.717053] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 928.717461] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating directory with path [datastore1] vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.717461] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82c85951-4af1-48d9-b125-a3dd5d7b9e94 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.729510] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Created directory with path [datastore1] vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.729510] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Fetch image to [datastore1] vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 928.729680] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 928.730748] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7d6bfe-a008-4e41-b15b-2137baf8130a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.737317] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49dc078b-3262-4bd0-86f8-e340d73937f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.747096] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9478aecd-a157-49e8-9cad-4bde54873066 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.791408] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2342fa-9dc4-41a8-bf9b-7b142fd22d5a {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.799958] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ad383855-46c3-4c73-b93e-98645c477198 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.806471] env[67008]: DEBUG oslo_vmware.api [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': task-2824898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063592} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.806698] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.806881] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 928.807061] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 928.807239] env[67008]: INFO nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Took 0.62 seconds to destroy the instance on the hypervisor. 
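(Editor's aside on the vCenter task records in this stretch of the log: each task appears first in a "Waiting for the task" / "progress is 0%" poll from oslo_vmware.api._poll_task, and, if it finishes, once more with a 'duration_secs' field and "completed successfully", as DeleteDatastoreFile_Task task-2824898 does above. The failed CopyVirtualDisk_Task task-2824896 never logs a completion record, which is itself a useful signal when triaging a run like this. Below is a minimal sketch for pulling those task lifecycles back out of the log. It assumes only the line formats visible here; the regexes and the task_lifecycles helper are illustrative and are not part of Nova or oslo.vmware.)

    import re

    # Regexes written against the oslo_vmware.api lines visible in this log:
    # the "... progress is N%." poll message and the "... completed
    # successfully." message carrying 'duration_secs'. This is an assumption
    # about this log's wording, not a documented oslo.vmware format.
    POLL = re.compile(
        r"\[\s*(?P<ts>\d+\.\d+)\].*Task: \{'id': (?P<task>[^,]+), "
        r"'name': (?P<name>\w+)\} progress is (?P<pct>\d+)%"
    )
    DONE = re.compile(
        r"\[\s*(?P<ts>\d+\.\d+)\].*Task: \{'id': (?P<task>[^,]+), "
        r"'name': (?P<name>\w+), 'duration_secs': (?P<secs>[\d.]+)\} "
        r"completed successfully"
    )

    def task_lifecycles(lines):
        """Yield (task_id, name, first_poll_ts, done_ts, duration_secs)."""
        first_poll = {}
        for line in lines:
            m = POLL.search(line)
            if m:
                # Remember when we first saw the task being polled.
                first_poll.setdefault(m['task'], float(m['ts']))
                continue
            m = DONE.search(line)
            if m:
                yield (m['task'], m['name'],
                       first_poll.get(m['task']), float(m['ts']),
                       float(m['secs']))

    if __name__ == '__main__':
        # Two lines condensed from this log (task-2824898 above).
        sample = [
            "[ 928.303099] Task: {'id': task-2824898, "
            "'name': DeleteDatastoreFile_Task} progress is 0%.",
            "[ 928.806471] Task: {'id': task-2824898, "
            "'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063592} "
            "completed successfully.",
        ]
        for rec in task_lifecycles(sample):
            print(rec)

(Run over the full service log, tasks that only ever poll and never complete, like the CopyVirtualDisk_Task that raises InvalidArgument here, stand out as entries with no completion record, which makes faulted or hung vCenter tasks easy to list.)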
[ 928.809286] env[67008]: DEBUG nova.compute.claims [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 928.809456] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 928.809683] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 928.821260] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 928.873036] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 928.932534] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 928.932729] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 929.237208] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8da2d2-8fc0-4a95-9a93-93f633820ffd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.244904] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2b948a-7497-4b26-bc27-c3e419e4c942 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.274140] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070982f8-38d4-413c-8684-6c8f9ca9a5db {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.281595] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ef277d-8fd9-4476-b4c1-de4a52f71c2c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.295492] env[67008]: DEBUG nova.compute.provider_tree [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.303873] env[67008]: DEBUG nova.scheduler.client.report [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 929.321470] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.512s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 929.322067] env[67008]: ERROR nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 929.322067] env[67008]: Faults: ['InvalidArgument'] [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Traceback (most recent call last): [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 
5472df57-f2bc-4a90-9251-13760f932d77] self.driver.spawn(context, instance, image_meta, [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self._fetch_image_if_missing(context, vi) [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] image_cache(vi, tmp_image_ds_loc) [ 929.322067] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] vm_util.copy_virtual_disk( [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] session._wait_for_task(vmdk_copy_task) [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] return self.wait_for_task(task_ref) [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] return evt.wait() [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] result = hub.switch() [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] return self.greenlet.switch() [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 929.322422] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] self.f(*self.args, **self.kw) [ 929.322920] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 929.322920] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] raise exceptions.translate_fault(task_info.error) [ 929.322920] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 929.322920] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Faults: ['InvalidArgument'] [ 929.322920] env[67008]: ERROR nova.compute.manager [instance: 5472df57-f2bc-4a90-9251-13760f932d77] [ 929.322920] env[67008]: DEBUG nova.compute.utils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 929.324134] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Build of instance 5472df57-f2bc-4a90-9251-13760f932d77 was re-scheduled: A specified parameter was not correct: fileType [ 929.324134] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 929.324508] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 929.324675] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 929.324840] env[67008]: DEBUG nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 929.324999] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 929.867961] env[67008]: DEBUG nova.network.neutron [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.878996] env[67008]: INFO nova.compute.manager [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 5472df57-f2bc-4a90-9251-13760f932d77] Took 0.55 seconds to deallocate network for instance. [ 929.982897] env[67008]: INFO nova.scheduler.client.report [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Deleted allocations for instance 5472df57-f2bc-4a90-9251-13760f932d77 [ 930.007735] env[67008]: DEBUG oslo_concurrency.lockutils [None req-755b8d41-9016-473c-b781-473f0dbc8ed4 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "5472df57-f2bc-4a90-9251-13760f932d77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 335.169s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 930.022023] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 930.075422] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 930.075422] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 930.076966] env[67008]: INFO nova.compute.claims [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.447942] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afb7d15-e981-462e-b915-ccb88a0ae39e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.455705] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93048e03-cc36-4d97-9423-69065c7684cc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.485987] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8086134-6261-4081-8eaa-7c16a501c9fc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.492873] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d447c88f-d49c-4393-bd56-552e42b65c8a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.505525] env[67008]: DEBUG nova.compute.provider_tree [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.514431] env[67008]: DEBUG nova.scheduler.client.report [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 930.530588] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 
tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.455s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 930.531137] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 930.570353] env[67008]: DEBUG nova.compute.utils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.575019] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 930.575019] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 930.581346] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 930.647964] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 930.656163] env[67008]: DEBUG nova.policy [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7f1f490ca7ae48fe9671d81c00838144', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b79ca5a6c5d34a40b5cda8ff66659dfa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 930.676466] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.676728] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.676939] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.677157] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.677311] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.677459] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.677667] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 
tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.677826] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.677993] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.678270] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.678493] env[67008]: DEBUG nova.virt.hardware [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.679397] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d06cdf7-d4ae-4ff5-9a1b-dadf3a3ec5c5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.687352] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1d4bfb-861e-4978-978a-e01e88c318a2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.349663] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Successfully created port: ff91025b-6d94-459a-a94a-eb06a9fbaa8f {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 932.360115] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Successfully updated port: ff91025b-6d94-459a-a94a-eb06a9fbaa8f {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 932.370838] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "refresh_cache-01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 932.371007] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquired lock "refresh_cache-01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" 
{{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 932.371171] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 932.437215] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 932.486776] env[67008]: DEBUG nova.compute.manager [req-049f5450-918d-4de4-95dc-7826cc4d7a08 req-320b5bf1-43f0-4457-9a11-2149214385fd service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Received event network-vif-plugged-ff91025b-6d94-459a-a94a-eb06a9fbaa8f {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 932.487191] env[67008]: DEBUG oslo_concurrency.lockutils [req-049f5450-918d-4de4-95dc-7826cc4d7a08 req-320b5bf1-43f0-4457-9a11-2149214385fd service nova] Acquiring lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 932.487290] env[67008]: DEBUG oslo_concurrency.lockutils [req-049f5450-918d-4de4-95dc-7826cc4d7a08 req-320b5bf1-43f0-4457-9a11-2149214385fd service nova] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 932.487444] env[67008]: DEBUG oslo_concurrency.lockutils [req-049f5450-918d-4de4-95dc-7826cc4d7a08 req-320b5bf1-43f0-4457-9a11-2149214385fd service nova] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 932.487615] env[67008]: DEBUG nova.compute.manager [req-049f5450-918d-4de4-95dc-7826cc4d7a08 req-320b5bf1-43f0-4457-9a11-2149214385fd service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] No waiting events found dispatching network-vif-plugged-ff91025b-6d94-459a-a94a-eb06a9fbaa8f {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 932.487775] env[67008]: WARNING nova.compute.manager [req-049f5450-918d-4de4-95dc-7826cc4d7a08 req-320b5bf1-43f0-4457-9a11-2149214385fd service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Received unexpected event network-vif-plugged-ff91025b-6d94-459a-a94a-eb06a9fbaa8f for instance with vm_state building and task_state spawning. 
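The paired "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" DEBUG records above, and throughout this log, come from oslo.concurrency's synchronized wrapper (the "inner" function in lockutils.py referenced in each record), not from Nova-specific code. A minimal sketch of that pattern, assuming only the oslo.concurrency package; the function name here is illustrative, not Nova's:

    # Sketch: the lockutils decorator that emits the acquire/waited/held
    # DEBUG records seen in this log. Nova wraps the same primitive around
    # sections such as ResourceTracker.instance_claim and the per-instance
    # build lock.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # Critical section. Concurrent callers block on entry, and the time
        # spent blocked is what the "waited N.NNNs" figure reports.
        pass

    instance_claim()

The "held N.NNNs" figure on release measures time spent inside the critical section, which is why the failed build above reports the per-instance lock as held for 335.169s: the whole (re-scheduled) build ran under it.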
[ 932.764201] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Updating instance_info_cache with network_info: [{"id": "ff91025b-6d94-459a-a94a-eb06a9fbaa8f", "address": "fa:16:3e:8c:3f:60", "network": {"id": "38ee87a5-722d-4072-bd57-faa69d522628", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1159209292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b79ca5a6c5d34a40b5cda8ff66659dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff91025b-6d", "ovs_interfaceid": "ff91025b-6d94-459a-a94a-eb06a9fbaa8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.782406] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Releasing lock "refresh_cache-01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 932.782720] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance network_info: |[{"id": "ff91025b-6d94-459a-a94a-eb06a9fbaa8f", "address": "fa:16:3e:8c:3f:60", "network": {"id": "38ee87a5-722d-4072-bd57-faa69d522628", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1159209292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b79ca5a6c5d34a40b5cda8ff66659dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff91025b-6d", "ovs_interfaceid": "ff91025b-6d94-459a-a94a-eb06a9fbaa8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 932.783117] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 
tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:3f:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff91025b-6d94-459a-a94a-eb06a9fbaa8f', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.790794] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Creating folder: Project (b79ca5a6c5d34a40b5cda8ff66659dfa). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 932.791369] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f552bc5c-b797-4cd3-b2a3-19e5e60d3c97 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.802669] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Created folder: Project (b79ca5a6c5d34a40b5cda8ff66659dfa) in parent group-v567993. [ 932.802859] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Creating folder: Instances. Parent ref: group-v568042. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 932.803098] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8422d992-bf83-449a-b3a0-27f49900e265 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.811864] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Created folder: Instances in parent group-v568042. [ 932.812139] env[67008]: DEBUG oslo.service.loopingcall [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.812353] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 932.812562] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1724bb20-da4e-43c9-b761-0fdcd846a319 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.844839] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.844839] env[67008]: value = "task-2824901" [ 932.844839] env[67008]: _type = "Task" [ 932.844839] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.856145] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824901, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.355626] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824901, 'name': CreateVM_Task, 'duration_secs': 0.307315} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.355816] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 933.357349] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 933.357517] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 933.357837] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 933.358100] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94fea431-04a1-4bf2-86dd-0884042a83f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.362721] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Waiting for the task: (returnval){ [ 933.362721] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5204598f-4c64-73e3-2267-799d4479f945" [ 933.362721] env[67008]: _type = "Task" [ 933.362721] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.377361] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 933.377594] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.377801] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 934.621286] env[67008]: DEBUG nova.compute.manager [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Received event network-changed-ff91025b-6d94-459a-a94a-eb06a9fbaa8f {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 934.621606] env[67008]: DEBUG nova.compute.manager [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Refreshing instance network info cache due to event network-changed-ff91025b-6d94-459a-a94a-eb06a9fbaa8f. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 934.621718] env[67008]: DEBUG oslo_concurrency.lockutils [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] Acquiring lock "refresh_cache-01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 934.621856] env[67008]: DEBUG oslo_concurrency.lockutils [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] Acquired lock "refresh_cache-01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 934.622016] env[67008]: DEBUG nova.network.neutron [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Refreshing network info cache for port ff91025b-6d94-459a-a94a-eb06a9fbaa8f {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 935.116923] env[67008]: DEBUG nova.network.neutron [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Updated VIF entry in instance network info cache for port ff91025b-6d94-459a-a94a-eb06a9fbaa8f. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 935.117393] env[67008]: DEBUG nova.network.neutron [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Updating instance_info_cache with network_info: [{"id": "ff91025b-6d94-459a-a94a-eb06a9fbaa8f", "address": "fa:16:3e:8c:3f:60", "network": {"id": "38ee87a5-722d-4072-bd57-faa69d522628", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1159209292-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b79ca5a6c5d34a40b5cda8ff66659dfa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff91025b-6d", "ovs_interfaceid": "ff91025b-6d94-459a-a94a-eb06a9fbaa8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.126395] env[67008]: DEBUG oslo_concurrency.lockutils [req-f7bb7b15-bc1a-4f25-a547-ee1df5f63744 req-ca1ff711-fb72-41ba-9eb6-8010de3998a8 service nova] Releasing lock "refresh_cache-01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 939.696768] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 951.858954] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 951.872792] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 951.873038] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 951.873235] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 951.873395] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 951.874674] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d48040-6f9e-4efa-aa85-acd93485d47c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.884431] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d40d92-4bf3-4a9f-9cba-aa6baf81d49a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.899936] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cb51ca-0a2c-425f-9d95-d28cf22c9bad {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.906511] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39aacd98-2fe0-4c4c-9534-14fd3ad2107a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.936275] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181075MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 951.936678] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 951.936901] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 952.009526] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.009687] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.009814] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.009939] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.010072] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.010276] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.010317] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.010411] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.010526] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.010636] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.021700] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.031917] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 50af6f13-9a91-45d9-94db-4e4e84c186a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.042026] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 58b2f966-3574-4318-bb5c-7f9b018ab763 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.051384] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 85627a1c-95e2-4959-81cc-6e25c8c8553d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.060813] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.070684] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance a4246977-28df-49ba-b0f5-3f37930aac5b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.079757] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54b07ba9-b49e-4c00-8775-2edb47ca7b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.089100] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 73213b19-77b2-46c0-b776-c50357e1bd07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.098294] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 82726788-853e-4a03-b16a-2aa0764b9e61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.107158] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bdbed593-d3f9-4ee2-af6c-3354c144ed2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.115881] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance c13f0c18-614d-4319-9422-a730eecc0820 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.124602] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b1fd403e-ae51-4a2d-a333-e988ce0c0607 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.133076] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 411e08da-5ac7-429a-86b6-942b65d8e28b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.141607] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 14709db2-f22d-4de3-84f6-be27329c4cc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.150499] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 17bcf1ed-e2b9-402d-a4bb-5d76e7af984f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.159741] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 094739c6-639a-4434-a263-bbc62f307918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.168395] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.168627] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 952.168772] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 952.479728] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cde1f55-5691-4ce6-a86b-40106894d83b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.487643] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858dffc7-9a83-4ab6-a491-9b8fa197ed91 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.516941] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8250d39e-904f-4a90-aec5-8fe1f127cf60 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.523969] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1771994-9456-4c20-a9c9-2d1412c74dc9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.537358] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: 
ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.566453] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 952.580410] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 952.580628] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.644s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 955.578579] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 955.578840] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.856400] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.856602] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.856858] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 957.856890] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 957.876371] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.876528] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.876658] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877056] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877205] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877330] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877450] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877567] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877757] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877805] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 957.877908] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 957.878396] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.878584] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.878716] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 958.874408] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.856900] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 977.678138] env[67008]: WARNING oslo_vmware.rw_handles [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 977.678138] env[67008]: ERROR oslo_vmware.rw_handles [ 977.678138] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 
977.680091] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 977.680354] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Copying Virtual Disk [datastore1] vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/9a20e987-4b47-4417-bcc3-765478ef7d3c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 977.680660] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3aaec25-343d-41a6-bff4-142a4d02a28c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.690820] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 977.690820] env[67008]: value = "task-2824902" [ 977.690820] env[67008]: _type = "Task" [ 977.690820] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.699528] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': task-2824902, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.201191] env[67008]: DEBUG oslo_vmware.exceptions [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 978.201452] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 978.202016] env[67008]: ERROR nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 978.202016] env[67008]: Faults: ['InvalidArgument'] [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Traceback (most recent call last): [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] yield resources [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self.driver.spawn(context, instance, image_meta, [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self._vmops.spawn(context, instance, image_meta, injected_files, [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self._fetch_image_if_missing(context, vi) [ 978.202016] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] image_cache(vi, tmp_image_ds_loc) [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] vm_util.copy_virtual_disk( [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] session._wait_for_task(vmdk_copy_task) [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] return self.wait_for_task(task_ref) [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] return evt.wait() [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] result = hub.switch() [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 978.202360] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] return self.greenlet.switch() [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self.f(*self.args, **self.kw) [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] raise exceptions.translate_fault(task_info.error) [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Faults: ['InvalidArgument'] [ 978.202714] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] [ 978.202714] env[67008]: INFO nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Terminating instance [ 978.203852] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 978.204144] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.204383] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb675774-71f9-4c0c-b043-9fc35b7159ea {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.206504] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 978.206692] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 978.207430] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6042ac51-72fd-49ab-8f80-ef36a76f6a73 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.214479] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 978.215431] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05092a68-564c-475e-ab08-7ef215429c09 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.216800] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.216973] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 978.217672] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fcbb442-9e1d-4b9b-a7da-5a7b4f60a506 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.222696] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Waiting for the task: (returnval){ [ 978.222696] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522c1480-22c3-f8f4-4276-5db65a7c500c" [ 978.222696] env[67008]: _type = "Task" [ 978.222696] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.229932] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522c1480-22c3-f8f4-4276-5db65a7c500c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.289288] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 978.289524] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 978.289705] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Deleting the datastore file [datastore1] 72dc7fb5-e94e-4784-9864-a1731ea7c755 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 978.289976] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2502b2e5-ffb4-4b68-ac4e-6dc23658b0be {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.296355] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for the task: (returnval){ [ 978.296355] env[67008]: value = "task-2824904" [ 978.296355] env[67008]: _type = "Task" [ 978.296355] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.303980] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': task-2824904, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.735108] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 978.735108] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Creating directory with path [datastore1] vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.735108] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fc9c42a-e931-477e-ae2d-dbee733fe0e6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.747706] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Created directory with path [datastore1] vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.747706] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Fetch image to [datastore1] vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 978.747706] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 978.747706] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb4954b-475b-4120-9594-7c3b77c9322f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.753751] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b003c1e-4244-43f9-8b4d-7ce69364b60d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.762610] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be22b140-b6ba-483d-bdd9-5907c2375f6a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.794163] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-390acd28-e916-49f6-90eb-94a6ab37d528 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.806926] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-269f9c9e-16f8-4f51-a83b-60958f2575ce {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.808720] env[67008]: DEBUG oslo_vmware.api [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Task: {'id': task-2824904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072653} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.808973] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.809203] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 978.809379] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 978.809546] env[67008]: INFO nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 978.812006] env[67008]: DEBUG nova.compute.claims [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 978.812216] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 978.812448] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 978.831815] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 978.885558] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 978.945490] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 978.945741] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 979.235383] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219d19ca-3a18-4116-8045-1adcd1312fbc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.241246] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0528de6f-682f-4926-935e-831974fddd06 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.273019] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69a407d-4dac-4943-afb0-9efd476d037c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.278720] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fca8d11-b5b0-49ff-8d10-7aa7e57e484e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.292232] env[67008]: DEBUG nova.compute.provider_tree [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.304591] env[67008]: DEBUG nova.scheduler.client.report [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 979.320382] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.508s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 979.321077] env[67008]: ERROR nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 979.321077] env[67008]: Faults: ['InvalidArgument'] [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Traceback (most recent call last): [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 
72dc7fb5-e94e-4784-9864-a1731ea7c755] self.driver.spawn(context, instance, image_meta, [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self._vmops.spawn(context, instance, image_meta, injected_files, [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self._fetch_image_if_missing(context, vi) [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] image_cache(vi, tmp_image_ds_loc) [ 979.321077] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] vm_util.copy_virtual_disk( [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] session._wait_for_task(vmdk_copy_task) [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] return self.wait_for_task(task_ref) [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] return evt.wait() [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] result = hub.switch() [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] return self.greenlet.switch() [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 979.321412] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] self.f(*self.args, **self.kw) [ 979.321762] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 979.321762] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] raise exceptions.translate_fault(task_info.error) [ 979.321762] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 979.321762] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Faults: ['InvalidArgument'] [ 979.321762] env[67008]: ERROR nova.compute.manager [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] [ 979.321962] env[67008]: DEBUG nova.compute.utils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 979.323390] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Build of instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 was re-scheduled: A specified parameter was not correct: fileType [ 979.323390] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 979.323769] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 979.323939] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 979.324139] env[67008]: DEBUG nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 979.324321] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 979.824350] env[67008]: DEBUG nova.network.neutron [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.836667] env[67008]: INFO nova.compute.manager [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Took 0.51 seconds to deallocate network for instance. [ 979.949097] env[67008]: INFO nova.scheduler.client.report [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Deleted allocations for instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 [ 979.983674] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7ea6560f-2644-41c9-b8aa-f6a5b39cd451 tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 379.610s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 979.985034] env[67008]: DEBUG oslo_concurrency.lockutils [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 180.049s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 979.985274] env[67008]: DEBUG oslo_concurrency.lockutils [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Acquiring lock "72dc7fb5-e94e-4784-9864-a1731ea7c755-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 979.985485] env[67008]: DEBUG oslo_concurrency.lockutils [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 979.985650] env[67008]: DEBUG oslo_concurrency.lockutils [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 979.988937] env[67008]: INFO nova.compute.manager [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Terminating instance [ 979.991162] env[67008]: DEBUG nova.compute.manager [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 979.991361] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 979.991615] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85f61999-ac4a-4e8c-b1bd-c267441727b1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.000938] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ace3b6-75eb-4ad5-97ad-95b893b95dd5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.015059] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 980.040428] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72dc7fb5-e94e-4784-9864-a1731ea7c755 could not be found. [ 980.044020] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 980.044020] env[67008]: INFO nova.compute.manager [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 980.044020] env[67008]: DEBUG oslo.service.loopingcall [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.044020] env[67008]: DEBUG nova.compute.manager [-] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 980.044020] env[67008]: DEBUG nova.network.neutron [-] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 980.073353] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 980.073353] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 980.074476] env[67008]: INFO nova.compute.claims [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.077787] env[67008]: DEBUG nova.network.neutron [-] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.086578] env[67008]: INFO nova.compute.manager [-] [instance: 72dc7fb5-e94e-4784-9864-a1731ea7c755] Took 0.05 seconds to deallocate network for instance. 
[ 980.181727] env[67008]: DEBUG oslo_concurrency.lockutils [None req-010807ff-ce66-4ca9-8856-d77ba3fb4bfc tempest-ServersAdminTestJSON-956856610 tempest-ServersAdminTestJSON-956856610-project-member] Lock "72dc7fb5-e94e-4784-9864-a1731ea7c755" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 980.457564] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf96e82-6070-4655-823b-c007fe2f6508 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.465661] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b26350d-cd3f-49ca-acc6-03d8643b8ea0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.496931] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac56a45e-7c4c-4a1a-b017-df143ce318cf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.504856] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53177a6-0750-48dc-9134-553437da4831 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.517514] env[67008]: DEBUG nova.compute.provider_tree [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.526353] env[67008]: DEBUG nova.scheduler.client.report [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 980.543629] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.471s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 980.544143] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 980.578564] env[67008]: DEBUG nova.compute.utils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 980.580117] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 980.580363] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 980.590920] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 980.658711] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 980.680927] env[67008]: DEBUG nova.policy [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '76e32fa3ce074e3c8721fc1eacdc4374', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7241d6bb21e2443b9a617cb8787709c6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 980.688651] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), 
allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.688912] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 980.689082] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 980.689316] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 980.689478] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 980.689627] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 980.689834] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 980.689989] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 980.690166] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 980.690330] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 980.690504] env[67008]: DEBUG nova.virt.hardware [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 980.691916] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ed6be4e2-9d0d-4394-a249-05c45b07574d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.700876] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b37eed-4ed1-4502-92b3-00eefd534897 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.259188] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Successfully created port: 5544920d-451f-407b-b910-df44b2a954f8 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.781822] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Successfully created port: 54b237e0-f36b-476a-9eaa-4ff45f3a0c39 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.112576] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Successfully updated port: 5544920d-451f-407b-b910-df44b2a954f8 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.552813] env[67008]: DEBUG nova.compute.manager [req-2b55d86e-0ad4-4d76-a399-e7b977cff77d req-e4879220-d870-45dc-92d2-4493a274f830 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Received event network-vif-plugged-5544920d-451f-407b-b910-df44b2a954f8 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 983.552873] env[67008]: DEBUG oslo_concurrency.lockutils [req-2b55d86e-0ad4-4d76-a399-e7b977cff77d req-e4879220-d870-45dc-92d2-4493a274f830 service nova] Acquiring lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 983.553126] env[67008]: DEBUG oslo_concurrency.lockutils [req-2b55d86e-0ad4-4d76-a399-e7b977cff77d req-e4879220-d870-45dc-92d2-4493a274f830 service nova] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 983.553390] env[67008]: DEBUG oslo_concurrency.lockutils [req-2b55d86e-0ad4-4d76-a399-e7b977cff77d req-e4879220-d870-45dc-92d2-4493a274f830 service nova] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 983.553574] env[67008]: DEBUG nova.compute.manager [req-2b55d86e-0ad4-4d76-a399-e7b977cff77d req-e4879220-d870-45dc-92d2-4493a274f830 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] No waiting events found dispatching network-vif-plugged-5544920d-451f-407b-b910-df44b2a954f8 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.553740] 
env[67008]: WARNING nova.compute.manager [req-2b55d86e-0ad4-4d76-a399-e7b977cff77d req-e4879220-d870-45dc-92d2-4493a274f830 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Received unexpected event network-vif-plugged-5544920d-451f-407b-b910-df44b2a954f8 for instance with vm_state building and task_state spawning. [ 984.720281] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Successfully updated port: 54b237e0-f36b-476a-9eaa-4ff45f3a0c39 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.731790] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 984.731964] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquired lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 984.732132] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 984.837506] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance cache missing network info. 
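
The "No waiting events found" debug followed by the "Received unexpected event" WARNING above is a benign race during spawn: Neutron delivered network-vif-plugged before the compute manager had registered a waiter for it, so there was nothing to pop. A toy sketch of the pop-or-warn shape (names illustrative; the real registry is nova.compute.manager.InstanceEvents):

```python
import threading

class InstanceEvents:
    """Toy waiter registry pairing external events with in-flight operations."""

    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()  # mirrors the "<uuid>-events" lock above

    def prepare_for_event(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Matches the WARNING above: the event arrived before (or without)
        # anyone waiting on it, e.g. while the VM is still building.
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()
```
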
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 986.039195] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updating instance_info_cache with network_info: [{"id": "5544920d-451f-407b-b910-df44b2a954f8", "address": "fa:16:3e:38:38:3c", "network": {"id": "c6865316-5ea0-4dc9-9ad4-e7f8992d06b2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-824338188", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5544920d-45", "ovs_interfaceid": "5544920d-451f-407b-b910-df44b2a954f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "address": "fa:16:3e:6d:e0:74", "network": {"id": "bfd52023-ffc0-44cd-82e2-5fe3a30f2130", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-103791520", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b237e0-f3", "ovs_interfaceid": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.058492] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Releasing lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 986.058883] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance network_info: |[{"id": "5544920d-451f-407b-b910-df44b2a954f8", "address": "fa:16:3e:38:38:3c", "network": {"id": 
"c6865316-5ea0-4dc9-9ad4-e7f8992d06b2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-824338188", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5544920d-45", "ovs_interfaceid": "5544920d-451f-407b-b910-df44b2a954f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "address": "fa:16:3e:6d:e0:74", "network": {"id": "bfd52023-ffc0-44cd-82e2-5fe3a30f2130", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-103791520", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b237e0-f3", "ovs_interfaceid": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 986.059541] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:38:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5544920d-451f-407b-b910-df44b2a954f8', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:e0:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92f3cfd6-c130-4390-8910-865fbc42afd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54b237e0-f36b-476a-9eaa-4ff45f3a0c39', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.068689] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Creating folder: Project (7241d6bb21e2443b9a617cb8787709c6). 
Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 986.069223] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e49b4d3-3ab3-4e7e-9d04-8fb253008cef {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.079014] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Created folder: Project (7241d6bb21e2443b9a617cb8787709c6) in parent group-v567993. [ 986.079204] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Creating folder: Instances. Parent ref: group-v568045. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 986.079422] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2019bd5b-6edc-42c9-8e25-c919f6179637 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.087439] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Created folder: Instances in parent group-v568045. [ 986.087723] env[67008]: DEBUG oslo.service.loopingcall [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.087905] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 986.088109] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7789e7d8-9e52-4881-a0af-a815b4241847 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.108350] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.108350] env[67008]: value = "task-2824907" [ 986.108350] env[67008]: _type = "Task" [ 986.108350] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.117621] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824907, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.429057] env[67008]: DEBUG nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Received event network-changed-5544920d-451f-407b-b910-df44b2a954f8 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 986.429490] env[67008]: DEBUG nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Refreshing instance network info cache due to event network-changed-5544920d-451f-407b-b910-df44b2a954f8. 
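
Note how each Neutron port above is translated for the VMware driver: the port's nsx-logical-switch-id becomes an OpaqueNetwork reference and the adapter model is fixed to vmxnet3. A reduced sketch of that mapping, keyed off the network_info fields shown above (the function name is ours):

```python
def vif_info_from_port(port):
    """Map one entry of the Neutron network_info list to the VIF dict
    the VMware driver feeds into the VM create spec (simplified)."""
    details = port["details"]
    return {
        "network_name": port["network"]["bridge"],        # "br-int"
        "mac_address": port["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],
        "vif_model": "vmxnet3",
    }
```

Feeding it the first port dict from the network_info blob above reproduces the first entry of the "Instance VIF info" list logged by build_virtual_machine.
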
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 986.429577] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Acquiring lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 986.429660] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Acquired lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 986.429824] env[67008]: DEBUG nova.network.neutron [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Refreshing network info cache for port 5544920d-451f-407b-b910-df44b2a954f8 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.619908] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824907, 'name': CreateVM_Task, 'duration_secs': 0.380944} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.620108] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 986.620878] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 986.621068] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 986.621399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 986.621648] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b8ab2ac-77a2-43ee-adf7-a93799467f10 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.626124] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Waiting for the task: (returnval){ [ 986.626124] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522b989e-d763-5205-1a6f-7c0f0de4c556" [ 986.626124] env[67008]: _type = "Task" [ 986.626124] env[67008]: } to complete. 
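
CreateVM_Task completed in 0.380944s and SearchDatastore_Task is now being polled at 0%. Both follow the same oslo.vmware pattern: the SOAP call returns a task reference immediately, and wait_for_task polls its state until success or error. A generic sketch of such a loop, assuming a get_task_info() accessor instead of the real vSphere bindings:

```python
import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    """Poll a vSphere-style task until it leaves the queued/running states.

    get_task_info is assumed to return an object with .state, .progress
    and .error attributes, mirroring vim.TaskInfo (illustrative only).
    """
    while True:
        info = get_task_info(task_ref)
        if info.state in ("queued", "running"):
            # Corresponds to the "_poll_task ... progress is N%" lines above.
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(interval)
        elif info.state == "success":
            return info
        else:  # "error"
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
```
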
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.634282] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522b989e-d763-5205-1a6f-7c0f0de4c556, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.136728] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 987.137144] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.137259] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 987.334917] env[67008]: DEBUG nova.network.neutron [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updated VIF entry in instance network info cache for port 5544920d-451f-407b-b910-df44b2a954f8. 
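
The bracketing around "Processing image ae01aa56-..." is plain oslo.concurrency usage: a named lock guards the per-datastore image cache while the driver checks whether the cached VMDK already exists, and lockutils itself emits the acquire/"released"/held-time lines. A minimal usage sketch (Nova's real lock name includes the datastore cache path seen in the log; fetch_image_if_missing is a placeholder):

```python
from oslo_concurrency import lockutils

# For this sketch only: external file locks need a lock_path configured.
lockutils.set_defaults(lock_path="/tmp")

def process_image(image_id, fetch_image_if_missing):
    # external=True adds a file lock so separate worker processes on the
    # same host serialize as well ("Acquired external semaphore" above).
    with lockutils.lock(image_id, external=True, lock_file_prefix="nova"):
        # Only one worker at a time checks/populates the cached VMDK.
        fetch_image_if_missing(image_id)
```
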
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 987.335350] env[67008]: DEBUG nova.network.neutron [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updating instance_info_cache with network_info: [{"id": "5544920d-451f-407b-b910-df44b2a954f8", "address": "fa:16:3e:38:38:3c", "network": {"id": "c6865316-5ea0-4dc9-9ad4-e7f8992d06b2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-824338188", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5544920d-45", "ovs_interfaceid": "5544920d-451f-407b-b910-df44b2a954f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "address": "fa:16:3e:6d:e0:74", "network": {"id": "bfd52023-ffc0-44cd-82e2-5fe3a30f2130", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-103791520", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b237e0-f3", "ovs_interfaceid": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.355112] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Releasing lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 987.355287] env[67008]: DEBUG nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Received event network-vif-plugged-54b237e0-f36b-476a-9eaa-4ff45f3a0c39 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 987.355485] env[67008]: DEBUG oslo_concurrency.lockutils 
[req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Acquiring lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 987.355684] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 987.355842] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 987.356009] env[67008]: DEBUG nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] No waiting events found dispatching network-vif-plugged-54b237e0-f36b-476a-9eaa-4ff45f3a0c39 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 987.356292] env[67008]: WARNING nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Received unexpected event network-vif-plugged-54b237e0-f36b-476a-9eaa-4ff45f3a0c39 for instance with vm_state building and task_state spawning. [ 987.356497] env[67008]: DEBUG nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Received event network-changed-54b237e0-f36b-476a-9eaa-4ff45f3a0c39 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 987.356657] env[67008]: DEBUG nova.compute.manager [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Refreshing instance network info cache due to event network-changed-54b237e0-f36b-476a-9eaa-4ff45f3a0c39. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 987.356840] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Acquiring lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 987.356978] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Acquired lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 987.357161] env[67008]: DEBUG nova.network.neutron [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Refreshing network info cache for port 54b237e0-f36b-476a-9eaa-4ff45f3a0c39 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 988.059101] env[67008]: DEBUG nova.network.neutron [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updated VIF entry in instance network info cache for port 54b237e0-f36b-476a-9eaa-4ff45f3a0c39. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 988.059101] env[67008]: DEBUG nova.network.neutron [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updating instance_info_cache with network_info: [{"id": "5544920d-451f-407b-b910-df44b2a954f8", "address": "fa:16:3e:38:38:3c", "network": {"id": "c6865316-5ea0-4dc9-9ad4-e7f8992d06b2", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-824338188", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb143ef7-8271-4a8a-a4aa-8eba9a89f6a1", "external-id": "nsx-vlan-transportzone-504", "segmentation_id": 504, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5544920d-45", "ovs_interfaceid": "5544920d-451f-407b-b910-df44b2a954f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "address": "fa:16:3e:6d:e0:74", "network": {"id": "bfd52023-ffc0-44cd-82e2-5fe3a30f2130", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-103791520", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "7241d6bb21e2443b9a617cb8787709c6", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92f3cfd6-c130-4390-8910-865fbc42afd1", "external-id": "nsx-vlan-transportzone-142", "segmentation_id": 142, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54b237e0-f3", "ovs_interfaceid": "54b237e0-f36b-476a-9eaa-4ff45f3a0c39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.070878] env[67008]: DEBUG oslo_concurrency.lockutils [req-d1da2f07-5201-4d0b-b62d-db10583f04a3 req-a453f298-90f1-4e23-8443-4219ed7acca3 service nova] Releasing lock "refresh_cache-3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1004.865521] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1004.865853] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1005.879157] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1005.879454] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1006.882362] env[67008]: DEBUG oslo_concurrency.lockutils [None req-432171d1-6acf-4170-bc52-90b986452db8 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "6d1218c7-1e36-4276-9675-5e15407cbc33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1006.883268] env[67008]: DEBUG oslo_concurrency.lockutils [None req-432171d1-6acf-4170-bc52-90b986452db8 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock 
"6d1218c7-1e36-4276-9675-5e15407cbc33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.571062] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b6c4913-8a44-4676-bb22-fb99a7e4f261 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "91f762b8-dbf7-4c6f-b07d-5989da743a88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.571338] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b6c4913-8a44-4676-bb22-fb99a7e4f261 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "91f762b8-dbf7-4c6f-b07d-5989da743a88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.711998] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Acquiring lock "316742af-0ca9-4695-8216-5c7067e27d7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.712248] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Lock "316742af-0ca9-4695-8216-5c7067e27d7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.741909] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Acquiring lock "dba9efdd-6e4d-488e-aa38-815f01c4b571" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.742154] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Lock "dba9efdd-6e4d-488e-aa38-815f01c4b571" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.772408] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Acquiring lock "33e54452-17bb-4141-856a-7e19e2e60dbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1012.772634] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Lock "33e54452-17bb-4141-856a-7e19e2e60dbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1012.980798] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1013.856567] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1013.870893] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1013.871138] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1013.871314] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1013.871532] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1013.874481] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9356e83a-3de8-4e86-a6bb-3a82c85ebc37 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.883318] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2b6cda-0140-4794-ad03-1b593b731136 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.897382] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a3bda3-016c-453a-b1b2-edc0f2bab337 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.904292] env[67008]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f6aa2a-cb81-4e5a-a24f-ce074c64ef0f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.933556] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181092MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1013.933731] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1013.933942] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1014.008176] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.008392] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.008561] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.008723] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.008879] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.009072] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.009237] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.009391] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.009542] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.009690] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1014.020497] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 50af6f13-9a91-45d9-94db-4e4e84c186a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.030405] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 58b2f966-3574-4318-bb5c-7f9b018ab763 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.039454] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 85627a1c-95e2-4959-81cc-6e25c8c8553d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.048288] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.056966] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance a4246977-28df-49ba-b0f5-3f37930aac5b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.065434] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54b07ba9-b49e-4c00-8775-2edb47ca7b3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.073920] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 73213b19-77b2-46c0-b776-c50357e1bd07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.082518] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 82726788-853e-4a03-b16a-2aa0764b9e61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.091126] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bdbed593-d3f9-4ee2-af6c-3354c144ed2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.099914] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance c13f0c18-614d-4319-9422-a730eecc0820 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.109731] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b1fd403e-ae51-4a2d-a333-e988ce0c0607 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.121067] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 411e08da-5ac7-429a-86b6-942b65d8e28b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.130807] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 14709db2-f22d-4de3-84f6-be27329c4cc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.140859] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 17bcf1ed-e2b9-402d-a4bb-5d76e7af984f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.151133] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 094739c6-639a-4434-a263-bbc62f307918 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.161019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.170558] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.180592] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.190131] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6d1218c7-1e36-4276-9675-5e15407cbc33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.199319] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 316742af-0ca9-4695-8216-5c7067e27d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.209122] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 91f762b8-dbf7-4c6f-b07d-5989da743a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.219267] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba9efdd-6e4d-488e-aa38-815f01c4b571 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.231349] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 33e54452-17bb-4141-856a-7e19e2e60dbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
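
The audit totals reported just below are easy to check: ten actively managed instances each hold {DISK_GB: 1, MEMORY_MB: 128, VCPU: 1} in placement, and the inventory reserves 512 MB of host RAM, which reproduces used_ram=1792MB, used_disk=10GB, used_vcpus=10 exactly (the tracker's real bookkeeping is more involved; this just reconfirms the arithmetic):

```python
instances = 10                      # instances "actively managed" in this audit
alloc = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}
reserved_ram_mb = 512               # host reservation from the inventory data

used_ram_mb = instances * alloc["MEMORY_MB"] + reserved_ram_mb
used_disk_gb = instances * alloc["DISK_GB"]
used_vcpus = instances * alloc["VCPU"]

print(used_ram_mb, used_disk_gb, used_vcpus)   # 1792 10 10
```
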
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1014.231349] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1014.231349] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1014.607443] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4a6825-349f-4658-a396-4239448b3e83 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.615679] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9c9289-ea48-408c-b7a4-c87e0f477819 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.645042] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2707c9a-9df5-4349-9590-9c353e001676 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.651996] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ea42de-5203-4047-9fd1-52e4aece1683 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.665376] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.675424] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1014.689013] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1014.689205] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.755s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1016.689618] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.689901] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.856414] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.857512] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.857807] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.857906] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1018.852374] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.856621] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.856951] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1019.856951] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1019.882413] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.882562] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.882694] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.882832] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.882980] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.883347] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.883496] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.883620] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.883738] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.883855] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1019.883971] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1021.856500] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.852636] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.264884] env[67008]: WARNING oslo_vmware.rw_handles [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1026.264884] env[67008]: ERROR oslo_vmware.rw_handles [ 1026.265546] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1026.271026] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1026.271026] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Copying Virtual Disk [datastore1] vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] 
vmware_temp/b7646326-8b0c-42f1-89e1-d9b7d91a940c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1026.271026] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2174f75a-8d7c-47be-a05a-a56a05063ca8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.275372] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Waiting for the task: (returnval){ [ 1026.275372] env[67008]: value = "task-2824908" [ 1026.275372] env[67008]: _type = "Task" [ 1026.275372] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.283452] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Task: {'id': task-2824908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.660737] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f92c2104-a2f6-4c86-9b35-bd16be3e092b tempest-ServerMetadataNegativeTestJSON-320936444 tempest-ServerMetadataNegativeTestJSON-320936444-project-member] Acquiring lock "07abda77-2e28-4bac-a36b-dc837208c28f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1026.661388] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f92c2104-a2f6-4c86-9b35-bd16be3e092b tempest-ServerMetadataNegativeTestJSON-320936444 tempest-ServerMetadataNegativeTestJSON-320936444-project-member] Lock "07abda77-2e28-4bac-a36b-dc837208c28f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1026.796048] env[67008]: DEBUG oslo_vmware.exceptions [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1026.796048] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1026.796048] env[67008]: ERROR nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.796048] env[67008]: Faults: ['InvalidArgument'] [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Traceback (most recent call last): [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] yield resources [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self.driver.spawn(context, instance, image_meta, [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self._fetch_image_if_missing(context, vi) [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] image_cache(vi, tmp_image_ds_loc) [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] vm_util.copy_virtual_disk( [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] session._wait_for_task(vmdk_copy_task) [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] return self.wait_for_task(task_ref) [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] return evt.wait() [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] result = hub.switch() [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] return self.greenlet.switch() [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self.f(*self.args, **self.kw) [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] raise exceptions.translate_fault(task_info.error) [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Faults: ['InvalidArgument'] [ 1026.796048] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] [ 1026.796048] env[67008]: INFO nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Terminating instance [ 1026.797110] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1026.797110] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.797110] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b 
tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1026.798259] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1026.798605] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7368dc9d-1e34-4894-a6d9-d1940e0a939a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.801249] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5693d235-9f0c-4094-a1c4-2966196dacd0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.810422] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1026.811833] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a4cd954-af82-42fa-ba2b-320bbafcdbba {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.813771] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.814102] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1026.814904] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-966a8374-c0b6-405d-85b2-c8d7ebdee658 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.821251] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Waiting for the task: (returnval){ [ 1026.821251] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52cd66da-94e1-e9b0-071a-87484a7508f2" [ 1026.821251] env[67008]: _type = "Task" [ 1026.821251] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.830395] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52cd66da-94e1-e9b0-071a-87484a7508f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.888034] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1026.888034] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1026.888034] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Deleting the datastore file [datastore1] b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.888034] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c372eee-6e95-44c5-b91c-178583b25fd9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.894095] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Waiting for the task: (returnval){ [ 1026.894095] env[67008]: value = "task-2824910" [ 1026.894095] env[67008]: _type = "Task" [ 1026.894095] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.903569] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Task: {'id': task-2824910, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.332404] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1027.334509] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Creating directory with path [datastore1] vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.334509] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4f54899-440c-4cda-874c-dfdb40ed737f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.345064] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Created directory with path [datastore1] vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.345179] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Fetch image to [datastore1] vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1027.345341] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1027.346089] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb30c4f-8246-4ca6-94c7-08ded838883f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.353924] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcd92f2-0942-49fc-87a1-1421c061ed46 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.364645] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d46517-7c0d-4751-bb3f-0a3c01c177f0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.400137] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e77501ac-2303-454e-a21e-3706de63fcb2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.407970] env[67008]: DEBUG oslo_vmware.api [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Task: {'id': task-2824910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070863} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.409754] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.410032] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1027.410286] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1027.410474] env[67008]: INFO nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1027.412410] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bcd1dd6c-d5eb-4c80-856c-8274ab9f3595 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.414549] env[67008]: DEBUG nova.compute.claims [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1027.414816] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1027.415130] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1027.438091] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1027.510465] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1027.570918] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1027.570918] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1027.939675] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67c33ac-6cdc-487f-bca4-25e06ce852e8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.947098] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a483ff9-0684-4aca-91ee-f2e07e0db1a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.980747] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d386b74a-f343-4534-a19c-755b33704b2f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.988210] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6159b8-f675-4e07-a6b2-53edb1c25c9f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.001095] env[67008]: DEBUG nova.compute.provider_tree [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.009414] env[67008]: DEBUG nova.scheduler.client.report [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1028.025930] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.611s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.026465] env[67008]: ERROR nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1028.026465] env[67008]: Faults: ['InvalidArgument'] [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Traceback (most recent call last): [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1028.026465] 
env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self.driver.spawn(context, instance, image_meta, [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self._fetch_image_if_missing(context, vi) [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] image_cache(vi, tmp_image_ds_loc) [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] vm_util.copy_virtual_disk( [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] session._wait_for_task(vmdk_copy_task) [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] return self.wait_for_task(task_ref) [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] return evt.wait() [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] result = hub.switch() [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] return self.greenlet.switch() [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] self.f(*self.args, **self.kw) [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] raise exceptions.translate_fault(task_info.error) [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Faults: ['InvalidArgument'] [ 1028.026465] env[67008]: ERROR nova.compute.manager [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] [ 1028.027332] env[67008]: DEBUG nova.compute.utils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1028.028476] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Build of instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 was re-scheduled: A specified parameter was not correct: fileType [ 1028.028476] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1028.028842] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1028.029021] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1028.029193] env[67008]: DEBUG nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1028.029354] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.443916] env[67008]: DEBUG nova.network.neutron [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.467447] env[67008]: INFO nova.compute.manager [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Took 0.44 seconds to deallocate network for instance. [ 1028.606762] env[67008]: INFO nova.scheduler.client.report [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Deleted allocations for instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 [ 1028.629397] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ac6e7c3-4213-4837-b551-0e9c389ce97b tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 429.578s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.630621] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 229.971s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1028.630842] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Acquiring lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1028.631051] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1028.631222] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.633505] env[67008]: INFO nova.compute.manager [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Terminating instance [ 1028.635244] env[67008]: DEBUG nova.compute.manager [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1028.635438] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1028.636120] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63f11e8c-8542-410b-8396-73df40cc37d6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.646640] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0538828e-fe56-42f9-938b-f56bb0b61b20 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.658904] env[67008]: DEBUG nova.compute.manager [None req-1a5e6332-6649-4613-a43b-38d8d0a8978c tempest-ServerActionsTestOtherA-2029932528 tempest-ServerActionsTestOtherA-2029932528-project-member] [instance: 50af6f13-9a91-45d9-94db-4e4e84c186a8] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1028.684622] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b2ee12a6-9af9-4d13-aefd-f9585b53cdb8 could not be found. 
[ 1028.684831] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1028.685014] env[67008]: INFO nova.compute.manager [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1028.685487] env[67008]: DEBUG oslo.service.loopingcall [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.685707] env[67008]: DEBUG nova.compute.manager [-] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1028.686849] env[67008]: DEBUG nova.network.neutron [-] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.692350] env[67008]: DEBUG nova.compute.manager [None req-1a5e6332-6649-4613-a43b-38d8d0a8978c tempest-ServerActionsTestOtherA-2029932528 tempest-ServerActionsTestOtherA-2029932528-project-member] [instance: 50af6f13-9a91-45d9-94db-4e4e84c186a8] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1028.712203] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1a5e6332-6649-4613-a43b-38d8d0a8978c tempest-ServerActionsTestOtherA-2029932528 tempest-ServerActionsTestOtherA-2029932528-project-member] Lock "50af6f13-9a91-45d9-94db-4e4e84c186a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.651s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.722622] env[67008]: DEBUG nova.network.neutron [-] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.727112] env[67008]: DEBUG nova.compute.manager [None req-08967cdb-bf5f-461b-a00d-6fe168e2b6c3 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] [instance: 58b2f966-3574-4318-bb5c-7f9b018ab763] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1028.733928] env[67008]: INFO nova.compute.manager [-] [instance: b2ee12a6-9af9-4d13-aefd-f9585b53cdb8] Took 0.05 seconds to deallocate network for instance. [ 1028.753594] env[67008]: DEBUG nova.compute.manager [None req-08967cdb-bf5f-461b-a00d-6fe168e2b6c3 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] [instance: 58b2f966-3574-4318-bb5c-7f9b018ab763] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1028.774683] env[67008]: DEBUG oslo_concurrency.lockutils [None req-08967cdb-bf5f-461b-a00d-6fe168e2b6c3 tempest-MigrationsAdminTest-1330455508 tempest-MigrationsAdminTest-1330455508-project-member] Lock "58b2f966-3574-4318-bb5c-7f9b018ab763" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.949s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.784741] env[67008]: DEBUG nova.compute.manager [None req-ce8fbe7e-44e2-497b-a689-1a6d4871d359 tempest-ServerDiagnosticsNegativeTest-921604856 tempest-ServerDiagnosticsNegativeTest-921604856-project-member] [instance: 85627a1c-95e2-4959-81cc-6e25c8c8553d] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1028.816443] env[67008]: DEBUG nova.compute.manager [None req-ce8fbe7e-44e2-497b-a689-1a6d4871d359 tempest-ServerDiagnosticsNegativeTest-921604856 tempest-ServerDiagnosticsNegativeTest-921604856-project-member] [instance: 85627a1c-95e2-4959-81cc-6e25c8c8553d] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1028.834558] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96838bc9-73d1-4416-ac56-51743e754927 tempest-ImagesOneServerTestJSON-1270131940 tempest-ImagesOneServerTestJSON-1270131940-project-member] Lock "b2ee12a6-9af9-4d13-aefd-f9585b53cdb8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.204s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.843517] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ce8fbe7e-44e2-497b-a689-1a6d4871d359 tempest-ServerDiagnosticsNegativeTest-921604856 tempest-ServerDiagnosticsNegativeTest-921604856-project-member] Lock "85627a1c-95e2-4959-81cc-6e25c8c8553d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 197.628s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1028.851949] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1028.899730] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1028.900982] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1028.901557] env[67008]: INFO nova.compute.claims [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.322958] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e81c41-9256-4f72-a1bf-fe0193ac1260 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.330225] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9c21ff-3d7b-44e4-ade5-526293f94fc5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.362135] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4f97ae-6227-4a9b-bbfd-5853f4b83705 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.369668] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216ec5f4-92b9-492b-990b-0e05f4255c81 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.383844] env[67008]: DEBUG nova.compute.provider_tree [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.395021] env[67008]: DEBUG nova.scheduler.client.report [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1029.415043] env[67008]: DEBUG 
oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1029.415636] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1029.462539] env[67008]: DEBUG nova.compute.utils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1029.463962] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1029.463962] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1029.476703] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1029.573424] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1029.602115] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=<?>,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T22:09:13Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1029.602115] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1029.602115] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.602115] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1029.602115] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.602115] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1029.602517] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1029.602847] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1029.603181] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1029.603477] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1029.603776] env[67008]: DEBUG nova.virt.hardware [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.605086] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ddb970-2d7b-4ebf-8069-2ab0774677c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.615036] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1605d914-3ef4-4d84-9bf1-df9eb95e7ed3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.644016] env[67008]: DEBUG nova.policy [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '406a538398ad4d679333e4a36d42f477', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a6bb413a4014fbba2206200a7ff7d8d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1030.702627] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Successfully created port: 0c52ad4e-2475-483a-81b5-cb35518a9a80 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.093115] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1032.471722] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Successfully updated port:
0c52ad4e-2475-483a-81b5-cb35518a9a80 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.487737] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1032.487891] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquired lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1032.488053] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.595880] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1032.926763] env[67008]: DEBUG nova.compute.manager [req-f1c99178-4577-4eba-a931-899ad1bc4652 req-eddd3af9-05e0-4013-be00-d3ab06f7df41 service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Received event network-vif-plugged-0c52ad4e-2475-483a-81b5-cb35518a9a80 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1032.926918] env[67008]: DEBUG oslo_concurrency.lockutils [req-f1c99178-4577-4eba-a931-899ad1bc4652 req-eddd3af9-05e0-4013-be00-d3ab06f7df41 service nova] Acquiring lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1032.927135] env[67008]: DEBUG oslo_concurrency.lockutils [req-f1c99178-4577-4eba-a931-899ad1bc4652 req-eddd3af9-05e0-4013-be00-d3ab06f7df41 service nova] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1032.928024] env[67008]: DEBUG oslo_concurrency.lockutils [req-f1c99178-4577-4eba-a931-899ad1bc4652 req-eddd3af9-05e0-4013-be00-d3ab06f7df41 service nova] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1032.928024] env[67008]: DEBUG nova.compute.manager [req-f1c99178-4577-4eba-a931-899ad1bc4652 req-eddd3af9-05e0-4013-be00-d3ab06f7df41 service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] No waiting events found dispatching network-vif-plugged-0c52ad4e-2475-483a-81b5-cb35518a9a80 {{(pid=67008) pop_instance_event
/opt/stack/nova/nova/compute/manager.py:320}} [ 1032.928024] env[67008]: WARNING nova.compute.manager [req-f1c99178-4577-4eba-a931-899ad1bc4652 req-eddd3af9-05e0-4013-be00-d3ab06f7df41 service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Received unexpected event network-vif-plugged-0c52ad4e-2475-483a-81b5-cb35518a9a80 for instance with vm_state building and task_state deleting. [ 1033.269440] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Updating instance_info_cache with network_info: [{"id": "0c52ad4e-2475-483a-81b5-cb35518a9a80", "address": "fa:16:3e:8f:00:1d", "network": {"id": "2b00019b-ff6b-4fd0-a98d-3e70b813f337", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-721160875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a6bb413a4014fbba2206200a7ff7d8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c52ad4e-24", "ovs_interfaceid": "0c52ad4e-2475-483a-81b5-cb35518a9a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.287572] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Releasing lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1033.287883] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance network_info: |[{"id": "0c52ad4e-2475-483a-81b5-cb35518a9a80", "address": "fa:16:3e:8f:00:1d", "network": {"id": "2b00019b-ff6b-4fd0-a98d-3e70b813f337", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-721160875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a6bb413a4014fbba2206200a7ff7d8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c52ad4e-24", "ovs_interfaceid": "0c52ad4e-2475-483a-81b5-cb35518a9a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1033.288308] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:00:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c52ad4e-2475-483a-81b5-cb35518a9a80', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.298201] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Creating folder: Project (7a6bb413a4014fbba2206200a7ff7d8d). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1033.298768] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-278ed921-10c8-486d-9b71-d8b94c99141a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.308814] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Created folder: Project (7a6bb413a4014fbba2206200a7ff7d8d) in parent group-v567993. [ 1033.308997] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Creating folder: Instances. Parent ref: group-v568048. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1033.309240] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07f9fb03-80e1-438c-b7ca-7ae9af1a3426 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.318965] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Created folder: Instances in parent group-v568048. [ 1033.319218] env[67008]: DEBUG oslo.service.loopingcall [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.319400] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1033.319595] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-187c8857-3d5d-4640-9e60-bb07744ca066 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.340285] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.340285] env[67008]: value = "task-2824913" [ 1033.340285] env[67008]: _type = "Task" [ 1033.340285] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.349972] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824913, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.849624] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824913, 'name': CreateVM_Task, 'duration_secs': 0.316645} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.850062] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1033.850850] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1033.851186] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1033.851634] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1033.854019] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ca1f1a1-87f2-4d30-ae6e-ce2761373417 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.857293] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Waiting for the task: (returnval){ [ 1033.857293] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c3243b-e905-b389-7809-2364d6e002e9" [ 1033.857293] env[67008]: _type = "Task" [ 1033.857293] env[67008]: } to 
complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.865598] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c3243b-e905-b389-7809-2364d6e002e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.370193] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1034.370193] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.371430] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1035.379756] env[67008]: DEBUG nova.compute.manager [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Received event network-changed-0c52ad4e-2475-483a-81b5-cb35518a9a80 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1035.379985] env[67008]: DEBUG nova.compute.manager [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Refreshing instance network info cache due to event network-changed-0c52ad4e-2475-483a-81b5-cb35518a9a80. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1035.380182] env[67008]: DEBUG oslo_concurrency.lockutils [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] Acquiring lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1035.380324] env[67008]: DEBUG oslo_concurrency.lockutils [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] Acquired lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1035.380482] env[67008]: DEBUG nova.network.neutron [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Refreshing network info cache for port 0c52ad4e-2475-483a-81b5-cb35518a9a80 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1035.949115] env[67008]: DEBUG nova.network.neutron [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Updated VIF entry in instance network info cache for port 0c52ad4e-2475-483a-81b5-cb35518a9a80. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1035.949115] env[67008]: DEBUG nova.network.neutron [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Updating instance_info_cache with network_info: [{"id": "0c52ad4e-2475-483a-81b5-cb35518a9a80", "address": "fa:16:3e:8f:00:1d", "network": {"id": "2b00019b-ff6b-4fd0-a98d-3e70b813f337", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-721160875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a6bb413a4014fbba2206200a7ff7d8d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c52ad4e-24", "ovs_interfaceid": "0c52ad4e-2475-483a-81b5-cb35518a9a80", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.962549] env[67008]: DEBUG oslo_concurrency.lockutils [req-6dd51cb2-4f50-4632-97cc-59a9ad2e0ed0 req-c3096307-239c-4a21-a731-164ac597beca service nova] Releasing lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1037.753627] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 
tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "1f040a29-196b-4a5c-808f-53dc56f3facc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1037.753953] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1043.279892] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ebf0717f-0544-4cc0-9bec-e51647669d25 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Acquiring lock "b976b36c-2847-483a-babb-77e58cdf3932" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1043.280242] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ebf0717f-0544-4cc0-9bec-e51647669d25 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "b976b36c-2847-483a-babb-77e58cdf3932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1049.328979] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1db2b624-a2cf-4080-b168-98c6168b085d tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "0761816d-194d-44ab-97f0-4214157a1edf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1049.329562] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1db2b624-a2cf-4080-b168-98c6168b085d tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "0761816d-194d-44ab-97f0-4214157a1edf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1050.573529] env[67008]: DEBUG oslo_concurrency.lockutils [None req-13525e14-b0b4-4cc9-a2f0-983dc2ec1711 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Acquiring lock "31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1050.573947] env[67008]: DEBUG oslo_concurrency.lockutils [None req-13525e14-b0b4-4cc9-a2f0-983dc2ec1711 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Lock "31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s
{{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1050.912701] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Acquiring lock "6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1050.912937] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1050.938662] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Acquiring lock "e3812a2c-b59a-48cb-90b5-0b185351d3b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1050.938842] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "e3812a2c-b59a-48cb-90b5-0b185351d3b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1052.862305] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4193674c-d760-41c5-bba9-ba11fdf5178f tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "fad05213-ac21-499c-b7fb-1929e9b3fca5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1052.862628] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4193674c-d760-41c5-bba9-ba11fdf5178f tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "fad05213-ac21-499c-b7fb-1929e9b3fca5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1060.160284] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a4463281-319a-40ba-8ca3-05c35681654d tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] Acquiring lock "9617d5d7-7977-475c-8375-1a59ed302444" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1060.160597] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a4463281-319a-40ba-8ca3-05c35681654d
tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] Lock "9617d5d7-7977-475c-8375-1a59ed302444" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1062.660503] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5fc68f6c-ccf3-4d01-8292-6e8aa9761f9e tempest-ServerPasswordTestJSON-1865102295 tempest-ServerPasswordTestJSON-1865102295-project-member] Acquiring lock "0a52230a-d439-45dd-a908-bd698f94e841" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1062.660761] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5fc68f6c-ccf3-4d01-8292-6e8aa9761f9e tempest-ServerPasswordTestJSON-1865102295 tempest-ServerPasswordTestJSON-1865102295-project-member] Lock "0a52230a-d439-45dd-a908-bd698f94e841" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1073.857625] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1073.872851] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1073.873376] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1073.875051] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1073.875051] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1073.875051] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a277bb4c-019c-40d2-887c-944d371bb163 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.884776] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a70a049-3628-486a-84a9-13a184c27a99 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.900519]
env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3703ac-aa45-4ec5-a01e-6f147f99c4a6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.908466] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25b3c2e-27ee-47b1-8b1e-2dcc0fde6cab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.941888] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181083MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1073.941888] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1073.942377] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1074.028255] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.028445] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.028575] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.028696] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.028815] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.028932] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.029060] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.029180] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.029293] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.029404] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1074.046760] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.060929] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.071844] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.082149] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6d1218c7-1e36-4276-9675-5e15407cbc33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.092008] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 316742af-0ca9-4695-8216-5c7067e27d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.104649] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 91f762b8-dbf7-4c6f-b07d-5989da743a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.115290] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba9efdd-6e4d-488e-aa38-815f01c4b571 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.125676] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 33e54452-17bb-4141-856a-7e19e2e60dbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.136206] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 07abda77-2e28-4bac-a36b-dc837208c28f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.147112] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.160290] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b976b36c-2847-483a-babb-77e58cdf3932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.173127] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.190624] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.206379] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e3812a2c-b59a-48cb-90b5-0b185351d3b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.218073] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fad05213-ac21-499c-b7fb-1929e9b3fca5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.231334] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9617d5d7-7977-475c-8375-1a59ed302444 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.244035] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0a52230a-d439-45dd-a908-bd698f94e841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1074.244035] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1074.244035] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1074.644283] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8a5ecf-cda7-4a37-baa8-e1c98ae1672e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.652969] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b055fff5-3b79-4215-b852-b0cfc39c3af2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.683585] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbe2113-5ae0-4b1d-812e-1ef0fa906bdb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.690630] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14dbc08-db8b-4338-8111-7245b838c7c8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.704565] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.712880] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1074.732677] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1074.732864] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.791s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1076.280744] env[67008]: WARNING oslo_vmware.rw_handles [None 
req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1076.280744] env[67008]: ERROR oslo_vmware.rw_handles [ 1076.281446] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1076.283092] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1076.283334] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Copying Virtual Disk [datastore1] vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/98ff1e62-3357-4597-8f8a-b369ec226e95/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1076.283617] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c5f6d2e-8f90-4cd1-8e95-ef3f006f2b60 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.291315] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Waiting for the task: (returnval){ [ 1076.291315] env[67008]: value = "task-2824914" [ 1076.291315] env[67008]: _type 
= "Task" [ 1076.291315] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.298830] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Task: {'id': task-2824914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.801236] env[67008]: DEBUG oslo_vmware.exceptions [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1076.801512] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1076.802073] env[67008]: ERROR nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1076.802073] env[67008]: Faults: ['InvalidArgument'] [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Traceback (most recent call last): [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] yield resources [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self.driver.spawn(context, instance, image_meta, [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self._fetch_image_if_missing(context, vi) [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] image_cache(vi, tmp_image_ds_loc) 
[ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] vm_util.copy_virtual_disk( [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] session._wait_for_task(vmdk_copy_task) [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] return self.wait_for_task(task_ref) [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] return evt.wait() [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] result = hub.switch() [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] return self.greenlet.switch() [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self.f(*self.args, **self.kw) [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] raise exceptions.translate_fault(task_info.error) [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Faults: ['InvalidArgument'] [ 1076.802073] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] [ 1076.802946] env[67008]: INFO nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Terminating instance [ 1076.803930] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 
tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1076.804162] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.804410] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75fe4781-aa90-4c16-9a77-8938b8bdbb12 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.806595] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1076.806785] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1076.807538] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0291ea-9ac7-4832-ba3f-73a374690482 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.814584] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1076.815630] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64c60018-34c4-4d25-af28-bb1bde133ef3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.817030] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.817264] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1076.817935] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec6cb906-d9c3-4686-ad75-717419b41a06 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.822616] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Waiting for the task: (returnval){ [ 1076.822616] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ecb1f1-89dc-64c9-0bf0-c65531b27383" [ 1076.822616] env[67008]: _type = "Task" [ 1076.822616] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.830012] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ecb1f1-89dc-64c9-0bf0-c65531b27383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.884375] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1076.884608] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1076.884790] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Deleting the datastore file [datastore1] 6ca0b308-d3d1-49bd-8ce1-813017b3c833 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.885081] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09121c2a-82dc-42bc-8e17-170b673a48c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.891549] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Waiting for the task: (returnval){ [ 1076.891549] env[67008]: value = "task-2824916" [ 1076.891549] env[67008]: _type = "Task" [ 1076.891549] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.899563] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Task: {'id': task-2824916, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.333449] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1077.333750] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Creating directory with path [datastore1] vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1077.333913] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59c54029-6ec6-45ef-8fc9-1d43297598e8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.344798] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Created directory with path [datastore1] vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1077.344981] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Fetch image to [datastore1] vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1077.345165] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1077.345867] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037bda91-e2bf-48f6-a30c-921fc2365c88 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.351978] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cbbe28-2288-40b4-98d1-dd56b680c9ee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.360839] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981de34d-e561-4acc-970c-c0f8b85ce124 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.392123] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f624ae8-a5b6-4251-bdd9-9458a8ab833d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.402539] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-05a11ef0-069d-4165-9e46-32149ed58291 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.404148] env[67008]: DEBUG oslo_vmware.api [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Task: {'id': task-2824916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074591} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.404389] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.404574] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1077.404828] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1077.405123] env[67008]: INFO nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1077.407284] env[67008]: DEBUG nova.compute.claims [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1077.407464] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1077.407678] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1077.491681] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1077.552051] env[67008]: DEBUG oslo_vmware.rw_handles [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1077.611132] env[67008]: DEBUG oslo_vmware.rw_handles [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1077.611333] env[67008]: DEBUG oslo_vmware.rw_handles [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1077.732017] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.732150] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.813968] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8279769b-ec88-4466-8ebe-eee9c359f7d9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.821714] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b606fb50-79df-49fa-a4ed-5c23afafafea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.850369] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd8d9df-5a16-49f9-a007-5e5289153783 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.856365] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.856463] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.857597] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db6d9cb-15b9-45ae-9fb3-24270196dcf3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.861238] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.861392] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1077.871532] env[67008]: DEBUG nova.compute.provider_tree [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.879612] env[67008]: DEBUG nova.scheduler.client.report [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1077.893537] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.486s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1077.894088] env[67008]: ERROR nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1077.894088] env[67008]: Faults: ['InvalidArgument'] [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Traceback (most recent call last): [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self.driver.spawn(context, instance, image_meta, [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self._fetch_image_if_missing(context, vi) [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 
6ca0b308-d3d1-49bd-8ce1-813017b3c833] image_cache(vi, tmp_image_ds_loc) [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] vm_util.copy_virtual_disk( [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] session._wait_for_task(vmdk_copy_task) [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] return self.wait_for_task(task_ref) [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] return evt.wait() [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] result = hub.switch() [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] return self.greenlet.switch() [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] self.f(*self.args, **self.kw) [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] raise exceptions.translate_fault(task_info.error) [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Faults: ['InvalidArgument'] [ 1077.894088] env[67008]: ERROR nova.compute.manager [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] [ 1077.894981] env[67008]: DEBUG nova.compute.utils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1077.896114] env[67008]: 
DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Build of instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 was re-scheduled: A specified parameter was not correct: fileType [ 1077.896114] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1077.896493] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1077.896663] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1077.896813] env[67008]: DEBUG nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1077.896969] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1078.295549] env[67008]: DEBUG nova.network.neutron [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.305874] env[67008]: INFO nova.compute.manager [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Took 0.41 seconds to deallocate network for instance. 
[ 1078.395777] env[67008]: INFO nova.scheduler.client.report [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Deleted allocations for instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 [ 1078.415889] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a08ea0da-22ac-4fc4-98b0-eeab4c404f21 tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 477.876s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.417491] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 278.132s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1078.417491] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Acquiring lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1078.417679] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1078.417737] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.419830] env[67008]: INFO nova.compute.manager [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Terminating instance [ 1078.421548] env[67008]: DEBUG nova.compute.manager [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1078.422988] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1078.422988] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7a02699-5b76-499e-935a-d60bb5f0a869 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.431933] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce560bef-23dd-4137-b1f9-9b24654926dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.443192] env[67008]: DEBUG nova.compute.manager [None req-203518ab-81bc-4d75-95eb-c17ab8e19a24 tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] [instance: a4246977-28df-49ba-b0f5-3f37930aac5b] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.464132] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ca0b308-d3d1-49bd-8ce1-813017b3c833 could not be found. [ 1078.464404] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1078.464641] env[67008]: INFO nova.compute.manager [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1078.464911] env[67008]: DEBUG oslo.service.loopingcall [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.465151] env[67008]: DEBUG nova.compute.manager [-] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1078.465266] env[67008]: DEBUG nova.network.neutron [-] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1078.469889] env[67008]: DEBUG nova.compute.manager [None req-203518ab-81bc-4d75-95eb-c17ab8e19a24 tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] [instance: a4246977-28df-49ba-b0f5-3f37930aac5b] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.491598] env[67008]: DEBUG nova.network.neutron [-] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.496137] env[67008]: DEBUG oslo_concurrency.lockutils [None req-203518ab-81bc-4d75-95eb-c17ab8e19a24 tempest-VolumesAdminNegativeTest-608012675 tempest-VolumesAdminNegativeTest-608012675-project-member] Lock "a4246977-28df-49ba-b0f5-3f37930aac5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.374s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.500846] env[67008]: INFO nova.compute.manager [-] [instance: 6ca0b308-d3d1-49bd-8ce1-813017b3c833] Took 0.04 seconds to deallocate network for instance. [ 1078.506609] env[67008]: DEBUG nova.compute.manager [None req-d9e29e43-6988-40c2-88a3-947906217526 tempest-ServersV294TestFqdnHostnames-347460946 tempest-ServersV294TestFqdnHostnames-347460946-project-member] [instance: 54b07ba9-b49e-4c00-8775-2edb47ca7b3d] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.530187] env[67008]: DEBUG nova.compute.manager [None req-d9e29e43-6988-40c2-88a3-947906217526 tempest-ServersV294TestFqdnHostnames-347460946 tempest-ServersV294TestFqdnHostnames-347460946-project-member] [instance: 54b07ba9-b49e-4c00-8775-2edb47ca7b3d] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.551399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d9e29e43-6988-40c2-88a3-947906217526 tempest-ServersV294TestFqdnHostnames-347460946 tempest-ServersV294TestFqdnHostnames-347460946-project-member] Lock "54b07ba9-b49e-4c00-8775-2edb47ca7b3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.726s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.560937] env[67008]: DEBUG nova.compute.manager [None req-961eba52-9ffc-4c7f-a06e-531b9f0b565a tempest-FloatingIPsAssociationNegativeTestJSON-1239678447 tempest-FloatingIPsAssociationNegativeTestJSON-1239678447-project-member] [instance: 73213b19-77b2-46c0-b776-c50357e1bd07] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.590996] env[67008]: DEBUG nova.compute.manager [None req-961eba52-9ffc-4c7f-a06e-531b9f0b565a tempest-FloatingIPsAssociationNegativeTestJSON-1239678447 tempest-FloatingIPsAssociationNegativeTestJSON-1239678447-project-member] [instance: 73213b19-77b2-46c0-b776-c50357e1bd07] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.602909] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8e46e915-174d-47dc-bcf2-5cb298ecae9c tempest-ServersAdminNegativeTestJSON-252801803 tempest-ServersAdminNegativeTestJSON-252801803-project-member] Lock "6ca0b308-d3d1-49bd-8ce1-813017b3c833" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.614839] env[67008]: DEBUG oslo_concurrency.lockutils [None req-961eba52-9ffc-4c7f-a06e-531b9f0b565a tempest-FloatingIPsAssociationNegativeTestJSON-1239678447 tempest-FloatingIPsAssociationNegativeTestJSON-1239678447-project-member] Lock "73213b19-77b2-46c0-b776-c50357e1bd07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.773s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.627628] env[67008]: DEBUG nova.compute.manager [None req-8ace3357-84e5-4bd1-b072-808d26d8929c tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: 82726788-853e-4a03-b16a-2aa0764b9e61] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.650489] env[67008]: DEBUG nova.compute.manager [None req-8ace3357-84e5-4bd1-b072-808d26d8929c tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: 82726788-853e-4a03-b16a-2aa0764b9e61] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.669821] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8ace3357-84e5-4bd1-b072-808d26d8929c tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "82726788-853e-4a03-b16a-2aa0764b9e61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.533s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.681700] env[67008]: DEBUG nova.compute.manager [None req-c11a09d7-9d11-4691-9695-be3ebf2a788e tempest-ServerMetadataTestJSON-1076877760 tempest-ServerMetadataTestJSON-1076877760-project-member] [instance: bdbed593-d3f9-4ee2-af6c-3354c144ed2e] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.721208] env[67008]: DEBUG nova.compute.manager [None req-c11a09d7-9d11-4691-9695-be3ebf2a788e tempest-ServerMetadataTestJSON-1076877760 tempest-ServerMetadataTestJSON-1076877760-project-member] [instance: bdbed593-d3f9-4ee2-af6c-3354c144ed2e] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.741814] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c11a09d7-9d11-4691-9695-be3ebf2a788e tempest-ServerMetadataTestJSON-1076877760 tempest-ServerMetadataTestJSON-1076877760-project-member] Lock "bdbed593-d3f9-4ee2-af6c-3354c144ed2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.522s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.749966] env[67008]: DEBUG nova.compute.manager [None req-067280c1-b15d-4d71-8b3b-658c658bb994 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: c13f0c18-614d-4319-9422-a730eecc0820] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.772995] env[67008]: DEBUG nova.compute.manager [None req-067280c1-b15d-4d71-8b3b-658c658bb994 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: c13f0c18-614d-4319-9422-a730eecc0820] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.791893] env[67008]: DEBUG oslo_concurrency.lockutils [None req-067280c1-b15d-4d71-8b3b-658c658bb994 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "c13f0c18-614d-4319-9422-a730eecc0820" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.601s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.802918] env[67008]: DEBUG nova.compute.manager [None req-194947b6-21d9-4c7c-b6f0-963b4928d8d2 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] [instance: b1fd403e-ae51-4a2d-a333-e988ce0c0607] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.824337] env[67008]: DEBUG nova.compute.manager [None req-194947b6-21d9-4c7c-b6f0-963b4928d8d2 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] [instance: b1fd403e-ae51-4a2d-a333-e988ce0c0607] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.845508] env[67008]: DEBUG oslo_concurrency.lockutils [None req-194947b6-21d9-4c7c-b6f0-963b4928d8d2 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] Lock "b1fd403e-ae51-4a2d-a333-e988ce0c0607" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.684s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.853905] env[67008]: DEBUG nova.compute.manager [None req-1ffeddb0-15c2-48b4-8852-39188185b9c6 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] [instance: 411e08da-5ac7-429a-86b6-942b65d8e28b] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.891782] env[67008]: DEBUG nova.compute.manager [None req-1ffeddb0-15c2-48b4-8852-39188185b9c6 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] [instance: 411e08da-5ac7-429a-86b6-942b65d8e28b] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.912262] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ffeddb0-15c2-48b4-8852-39188185b9c6 tempest-ListImageFiltersTestJSON-1599536104 tempest-ListImageFiltersTestJSON-1599536104-project-member] Lock "411e08da-5ac7-429a-86b6-942b65d8e28b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.178s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.920414] env[67008]: DEBUG nova.compute.manager [None req-94b9cb81-cab0-4e2e-9192-39e922615e43 tempest-ServerAddressesNegativeTestJSON-968289511 tempest-ServerAddressesNegativeTestJSON-968289511-project-member] [instance: 14709db2-f22d-4de3-84f6-be27329c4cc1] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.943679] env[67008]: DEBUG nova.compute.manager [None req-94b9cb81-cab0-4e2e-9192-39e922615e43 tempest-ServerAddressesNegativeTestJSON-968289511 tempest-ServerAddressesNegativeTestJSON-968289511-project-member] [instance: 14709db2-f22d-4de3-84f6-be27329c4cc1] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1078.963423] env[67008]: DEBUG oslo_concurrency.lockutils [None req-94b9cb81-cab0-4e2e-9192-39e922615e43 tempest-ServerAddressesNegativeTestJSON-968289511 tempest-ServerAddressesNegativeTestJSON-968289511-project-member] Lock "14709db2-f22d-4de3-84f6-be27329c4cc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.961s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1078.971434] env[67008]: DEBUG nova.compute.manager [None req-fd2a1ad3-03f5-4840-9402-b7c056d4c41d tempest-InstanceActionsTestJSON-1445961136 tempest-InstanceActionsTestJSON-1445961136-project-member] [instance: 17bcf1ed-e2b9-402d-a4bb-5d76e7af984f] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1078.992165] env[67008]: DEBUG nova.compute.manager [None req-fd2a1ad3-03f5-4840-9402-b7c056d4c41d tempest-InstanceActionsTestJSON-1445961136 tempest-InstanceActionsTestJSON-1445961136-project-member] [instance: 17bcf1ed-e2b9-402d-a4bb-5d76e7af984f] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1079.011537] env[67008]: DEBUG oslo_concurrency.lockutils [None req-fd2a1ad3-03f5-4840-9402-b7c056d4c41d tempest-InstanceActionsTestJSON-1445961136 tempest-InstanceActionsTestJSON-1445961136-project-member] Lock "17bcf1ed-e2b9-402d-a4bb-5d76e7af984f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 221.945s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.019473] env[67008]: DEBUG nova.compute.manager [None req-917ea9c3-6aa2-45ee-8135-d49f0bef13c4 tempest-ServersNegativeTestMultiTenantJSON-1561418679 tempest-ServersNegativeTestMultiTenantJSON-1561418679-project-member] [instance: 094739c6-639a-4434-a263-bbc62f307918] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1079.041471] env[67008]: DEBUG nova.compute.manager [None req-917ea9c3-6aa2-45ee-8135-d49f0bef13c4 tempest-ServersNegativeTestMultiTenantJSON-1561418679 tempest-ServersNegativeTestMultiTenantJSON-1561418679-project-member] [instance: 094739c6-639a-4434-a263-bbc62f307918] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1079.061133] env[67008]: DEBUG oslo_concurrency.lockutils [None req-917ea9c3-6aa2-45ee-8135-d49f0bef13c4 tempest-ServersNegativeTestMultiTenantJSON-1561418679 tempest-ServersNegativeTestMultiTenantJSON-1561418679-project-member] Lock "094739c6-639a-4434-a263-bbc62f307918" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.396s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.070829] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Starting instance...
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1079.123627] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1079.123909] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1079.125892] env[67008]: INFO nova.compute.claims [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.453464] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd604b12-b1cf-44e2-8730-82ef887aac6e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.461417] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dc08fa-2d92-41c6-9c6d-ad66e3ec9f53 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.491248] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3bf2b6-6be2-4543-8040-84ccfd621c32 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.498841] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8770abbb-8cb0-44e1-b6a9-ff5eacbe21ef {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.512077] env[67008]: DEBUG nova.compute.provider_tree [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.523773] env[67008]: DEBUG nova.scheduler.client.report [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1079.537853] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1079.538358] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1079.576764] env[67008]: DEBUG nova.compute.utils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1079.578381] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1079.578381] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1079.587124] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1079.667031] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1079.694035] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=<?>,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T22:09:13Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1079.694397] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1079.694439] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.694615] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1079.694762] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.694908] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1079.695147] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1079.695390] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1079.695579] env[67008]: DEBUG nova.virt.hardware [None
req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1079.695745] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1079.695914] env[67008]: DEBUG nova.virt.hardware [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1079.696772] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba74f222-787f-483b-8ef4-61134015e8ef {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.704755] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2832dff4-fef3-4d71-ac05-055fa0892925 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.731102] env[67008]: DEBUG nova.policy [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89307c5a51a04a25b077f23a926d34fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e529b26b441a4d5b92bec1d2104451dd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1079.857264] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.402363] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Successfully created port: 3bf602f3-8269-471e-b326-3077fa4dade6 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.856985] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.857287] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1080.857460] env[67008]: DEBUG nova.compute.manager [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1080.881281] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.881437] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.881568] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.881697] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.881821] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.881941] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.882161] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.882305] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.882430] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.882547] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1080.882666] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1081.097756] env[67008]: DEBUG nova.compute.manager [req-1adadeef-0523-468b-8698-949f9ec334a9 req-4e89bc8a-9c55-4057-9fa5-da6073bac0fe service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Received event network-vif-plugged-3bf602f3-8269-471e-b326-3077fa4dade6 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1081.097982] env[67008]: DEBUG oslo_concurrency.lockutils [req-1adadeef-0523-468b-8698-949f9ec334a9 req-4e89bc8a-9c55-4057-9fa5-da6073bac0fe service nova] Acquiring lock "3c10ac79-441a-467c-a3aa-fdb9a9451698-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1081.098200] env[67008]: DEBUG oslo_concurrency.lockutils [req-1adadeef-0523-468b-8698-949f9ec334a9 req-4e89bc8a-9c55-4057-9fa5-da6073bac0fe service nova] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1081.098374] env[67008]: DEBUG oslo_concurrency.lockutils [req-1adadeef-0523-468b-8698-949f9ec334a9 req-4e89bc8a-9c55-4057-9fa5-da6073bac0fe service nova] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1081.098553] env[67008]: DEBUG nova.compute.manager [req-1adadeef-0523-468b-8698-949f9ec334a9 req-4e89bc8a-9c55-4057-9fa5-da6073bac0fe service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] No waiting events found dispatching network-vif-plugged-3bf602f3-8269-471e-b326-3077fa4dade6 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1081.098716] env[67008]: WARNING nova.compute.manager [req-1adadeef-0523-468b-8698-949f9ec334a9 req-4e89bc8a-9c55-4057-9fa5-da6073bac0fe service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Received unexpected event network-vif-plugged-3bf602f3-8269-471e-b326-3077fa4dade6 for instance with vm_state building and task_state spawning.
[ 1081.215746] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Successfully updated port: 3bf602f3-8269-471e-b326-3077fa4dade6 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.226412] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "refresh_cache-3c10ac79-441a-467c-a3aa-fdb9a9451698" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1081.226563] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquired lock "refresh_cache-3c10ac79-441a-467c-a3aa-fdb9a9451698" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1081.226718] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1081.264065] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1081.733129] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Updating instance_info_cache with network_info: [{"id": "3bf602f3-8269-471e-b326-3077fa4dade6", "address": "fa:16:3e:46:5c:c2", "network": {"id": "48ffcb1f-2b24-4bfb-8099-6ebe9788eebe", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1912372735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e529b26b441a4d5b92bec1d2104451dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf602f3-82", "ovs_interfaceid": "3bf602f3-8269-471e-b326-3077fa4dade6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.744981] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Releasing lock "refresh_cache-3c10ac79-441a-467c-a3aa-fdb9a9451698" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1081.744981] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance network_info: |[{"id": "3bf602f3-8269-471e-b326-3077fa4dade6", "address": "fa:16:3e:46:5c:c2", "network": {"id": "48ffcb1f-2b24-4bfb-8099-6ebe9788eebe", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1912372735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e529b26b441a4d5b92bec1d2104451dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf602f3-82", "ovs_interfaceid": "3bf602f3-8269-471e-b326-3077fa4dade6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1081.745420] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:5c:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a4d142-3f97-47fe-b074-58923c46815e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3bf602f3-8269-471e-b326-3077fa4dade6', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.752883] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Creating folder: Project (e529b26b441a4d5b92bec1d2104451dd). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1081.753427] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7455bbe7-7c93-4ad0-be27-3f044c14dd05 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.764804] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Created folder: Project (e529b26b441a4d5b92bec1d2104451dd) in parent group-v567993. [ 1081.764981] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Creating folder: Instances. Parent ref: group-v568051. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1081.765236] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20f96e78-0a7f-4434-a8cf-4ba94f3ba603 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.773938] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Created folder: Instances in parent group-v568051. [ 1081.774183] env[67008]: DEBUG oslo.service.loopingcall [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1081.774367] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1081.774625] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b47cbd9-e8bf-46ae-a94d-53d9ab3edb41 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.792463] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.792463] env[67008]: value = "task-2824919" [ 1081.792463] env[67008]: _type = "Task" [ 1081.792463] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.799782] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824919, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.302263] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824919, 'name': CreateVM_Task, 'duration_secs': 0.282382} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.302547] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1082.303152] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1082.303323] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1082.303642] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1082.303893] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a56d3b78-6fbd-42ae-b95d-c24c8caf63dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.308231] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Waiting for the task: (returnval){ [ 1082.308231] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]524383cc-7836-b39b-4d2f-b927102ec334" [ 1082.308231] env[67008]: _type = "Task" [ 1082.308231] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.315255] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]524383cc-7836-b39b-4d2f-b927102ec334, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.820896] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1082.821152] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.821366] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1082.856061] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.154976] env[67008]: DEBUG nova.compute.manager [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Received event network-changed-3bf602f3-8269-471e-b326-3077fa4dade6 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1083.155321] env[67008]: DEBUG nova.compute.manager [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Refreshing instance network info cache due to event network-changed-3bf602f3-8269-471e-b326-3077fa4dade6. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1083.155572] env[67008]: DEBUG oslo_concurrency.lockutils [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] Acquiring lock "refresh_cache-3c10ac79-441a-467c-a3aa-fdb9a9451698" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1083.155715] env[67008]: DEBUG oslo_concurrency.lockutils [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] Acquired lock "refresh_cache-3c10ac79-441a-467c-a3aa-fdb9a9451698" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1083.155880] env[67008]: DEBUG nova.network.neutron [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Refreshing network info cache for port 3bf602f3-8269-471e-b326-3077fa4dade6 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1083.445812] env[67008]: DEBUG nova.network.neutron [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Updated VIF entry in instance network info cache for port 3bf602f3-8269-471e-b326-3077fa4dade6. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1083.446201] env[67008]: DEBUG nova.network.neutron [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Updating instance_info_cache with network_info: [{"id": "3bf602f3-8269-471e-b326-3077fa4dade6", "address": "fa:16:3e:46:5c:c2", "network": {"id": "48ffcb1f-2b24-4bfb-8099-6ebe9788eebe", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1912372735-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e529b26b441a4d5b92bec1d2104451dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3bf602f3-82", "ovs_interfaceid": "3bf602f3-8269-471e-b326-3077fa4dade6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.456173] env[67008]: DEBUG oslo_concurrency.lockutils [req-cf3f7459-62dc-4354-9058-b08d4ecefb54 req-56e99cf8-5fdd-4dc7-abb2-9b00e4cb3b88 service nova] Releasing lock "refresh_cache-3c10ac79-441a-467c-a3aa-fdb9a9451698" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1084.440189] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock 
"042421f3-9b91-4fb2-bc3c-0d97e93ad78e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1084.440428] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1088.780399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1126.233894] env[67008]: WARNING oslo_vmware.rw_handles [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1126.233894] env[67008]: ERROR oslo_vmware.rw_handles [ 1126.234542] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1126.236277] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Caching image {{(pid=67008) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1126.236519] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Copying Virtual Disk [datastore1] vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/bbfa815d-9cdc-4212-a2d4-23373920250c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1126.236803] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35bc7958-46a9-42e0-b438-1d29db4634c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.245323] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Waiting for the task: (returnval){ [ 1126.245323] env[67008]: value = "task-2824920" [ 1126.245323] env[67008]: _type = "Task" [ 1126.245323] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.253113] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Task: {'id': task-2824920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.757050] env[67008]: DEBUG oslo_vmware.exceptions [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1126.757050] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1126.757706] env[67008]: ERROR nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1126.757706] env[67008]: Faults: ['InvalidArgument'] [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Traceback (most recent call last): [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] yield resources [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self.driver.spawn(context, instance, image_meta, [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self._fetch_image_if_missing(context, vi) [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] image_cache(vi, tmp_image_ds_loc) [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] vm_util.copy_virtual_disk( [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] session._wait_for_task(vmdk_copy_task) [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] return self.wait_for_task(task_ref) [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] return evt.wait() [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] result = hub.switch() [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] return self.greenlet.switch() [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self.f(*self.args, **self.kw) [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] raise exceptions.translate_fault(task_info.error) [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Faults: ['InvalidArgument'] [ 1126.757706] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] [ 1126.758895] env[67008]: INFO nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Terminating instance [ 1126.759671] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1126.759874] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.760135] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-863b7d1e-5fa7-416f-a42d-28170d73a6e0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.762476] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1126.762665] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1126.763399] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3f22ba-8999-4c2f-b045-550c1d29dfd0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.770195] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1126.770430] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01dffa41-7d1f-4146-a169-277ff39871cf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.773495] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.773672] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1126.774617] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaa6f7f5-6425-4197-b9c5-7beb708e9a8f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.780329] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Waiting for the task: (returnval){ [ 1126.780329] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]529bdf1e-7ea9-618a-9473-4a8f92c7467a" [ 1126.780329] env[67008]: _type = "Task" [ 1126.780329] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.787164] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]529bdf1e-7ea9-618a-9473-4a8f92c7467a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.840582] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1126.840823] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1126.841019] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Deleting the datastore file [datastore1] 40a26f4e-0be9-4770-83a7-31c87dbf921f {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1126.841331] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9127f192-9d56-4c88-85f8-e9c3abb7f4eb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.846927] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Waiting for the task: (returnval){ [ 1126.846927] env[67008]: value = "task-2824922" [ 1126.846927] env[67008]: _type = "Task" [ 1126.846927] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.854385] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Task: {'id': task-2824922, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.291473] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1127.291804] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Creating directory with path [datastore1] vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1127.291940] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdb6d86d-a2c9-446d-ac8a-d57811e4eac2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.306101] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Created directory with path [datastore1] vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1127.306300] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Fetch image to [datastore1] vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1127.306469] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1127.307203] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f776733b-37f1-4d67-90e3-e8bc174b3a8f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.313635] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63de994a-3d7f-460b-9a1c-1655fc08db80 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.322356] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b6d12a-aefc-4471-9203-9f48e914eaad {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.354948] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c2bd5e7c-1377-4c09-842e-f784d236afa7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.361669] env[67008]: DEBUG oslo_vmware.api [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Task: {'id': task-2824922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063531} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.363035] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1127.363233] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1127.363407] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1127.363577] env[67008]: INFO nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1127.365310] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e132d01b-40cf-475c-94e5-8efbd01d0387 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.368055] env[67008]: DEBUG nova.compute.claims [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1127.368055] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1127.368055] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1127.388798] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1127.441833] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1127.501885] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1127.502089] env[67008]: DEBUG oslo_vmware.rw_handles [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1127.755012] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd62a59-59c4-4956-8fe4-3b602ca319e3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.764355] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169f9691-175a-41f2-8d0e-cd6845d6b75e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.794229] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57377ffc-07a6-4ba6-b141-a4e9d958553a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.801438] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4f52df-b470-4b54-bd9c-5e4cd493dc5d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.814546] env[67008]: DEBUG nova.compute.provider_tree [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.823179] env[67008]: DEBUG nova.scheduler.client.report [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1127.838376] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.471s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1127.838891] env[67008]: ERROR nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1127.838891] env[67008]: Faults: ['InvalidArgument'] [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Traceback (most recent call last): [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/compute/manager.py", line 
2615, in _build_and_run_instance [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self.driver.spawn(context, instance, image_meta, [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self._fetch_image_if_missing(context, vi) [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] image_cache(vi, tmp_image_ds_loc) [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] vm_util.copy_virtual_disk( [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] session._wait_for_task(vmdk_copy_task) [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] return self.wait_for_task(task_ref) [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] return evt.wait() [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] result = hub.switch() [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] return self.greenlet.switch() [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] self.f(*self.args, **self.kw) [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 
40a26f4e-0be9-4770-83a7-31c87dbf921f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] raise exceptions.translate_fault(task_info.error) [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Faults: ['InvalidArgument'] [ 1127.838891] env[67008]: ERROR nova.compute.manager [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] [ 1127.839695] env[67008]: DEBUG nova.compute.utils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1127.840943] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Build of instance 40a26f4e-0be9-4770-83a7-31c87dbf921f was re-scheduled: A specified parameter was not correct: fileType [ 1127.840943] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1127.841328] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1127.841501] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1127.841665] env[67008]: DEBUG nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1127.841827] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1128.391317] env[67008]: DEBUG nova.network.neutron [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.404019] env[67008]: INFO nova.compute.manager [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Took 0.56 seconds to deallocate network for instance. [ 1128.519384] env[67008]: INFO nova.scheduler.client.report [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Deleted allocations for instance 40a26f4e-0be9-4770-83a7-31c87dbf921f [ 1128.541995] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4cbb553f-38a2-42de-84d9-335034a05211 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 526.805s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1128.542908] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 327.771s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1128.543176] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Acquiring lock "40a26f4e-0be9-4770-83a7-31c87dbf921f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1128.543401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] 
Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1128.543572] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1128.546112] env[67008]: INFO nova.compute.manager [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Terminating instance [ 1128.547487] env[67008]: DEBUG nova.compute.manager [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1128.547677] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1128.548348] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd2f6186-00ed-428d-be4b-fcaa8dff7284 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.555978] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1128.561561] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f33468-2318-495c-a281-0b5e5825c2f2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.593177] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40a26f4e-0be9-4770-83a7-31c87dbf921f could not be found. 
[ 1128.593388] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1128.593562] env[67008]: INFO nova.compute.manager [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1128.593802] env[67008]: DEBUG oslo.service.loopingcall [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1128.598057] env[67008]: DEBUG nova.compute.manager [-] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1128.598169] env[67008]: DEBUG nova.network.neutron [-] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1128.610949] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1128.611200] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1128.612697] env[67008]: INFO nova.compute.claims [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.621851] env[67008]: DEBUG nova.network.neutron [-] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.632238] env[67008]: INFO nova.compute.manager [-] [instance: 40a26f4e-0be9-4770-83a7-31c87dbf921f] Took 0.03 seconds to deallocate network for instance. 
[ 1128.717568] env[67008]: DEBUG oslo_concurrency.lockutils [None req-11ef58a3-1c7a-4078-9c93-689f8ea35088 tempest-InstanceActionsNegativeTestJSON-141745699 tempest-InstanceActionsNegativeTestJSON-141745699-project-member] Lock "40a26f4e-0be9-4770-83a7-31c87dbf921f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1128.930788] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5210da05-60de-4699-bd6d-aeff0271e548 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.938204] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d60ee60-065d-49c2-a73c-b6dc18bcdfc2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.968616] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0995ab81-bf23-4aae-89db-91342ebf4ad9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.975655] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257db7da-6f67-4138-ae10-986b4b573c10 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.988425] env[67008]: DEBUG nova.compute.provider_tree [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.998609] env[67008]: DEBUG nova.scheduler.client.report [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1129.011437] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.400s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1129.011881] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1129.045360] env[67008]: DEBUG nova.compute.utils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1129.046849] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1129.047723] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1129.056212] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1129.123523] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1129.134469] env[67008]: DEBUG nova.policy [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '459237510a15413cb1a772592db6165d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed062493109b4e2e8d86864d605c2af3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1129.151929] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1129.152185] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1129.152343] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.152524] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1129.152668] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.152812] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1129.153074] env[67008]: 
DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1129.153261] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1129.153434] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1129.153595] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1129.153764] env[67008]: DEBUG nova.virt.hardware [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1129.154771] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423131c6-b0f1-4800-a151-f378b91d9398 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.163085] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c41978-a19b-494d-a95a-77d53bc18d72 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.720236] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Successfully created port: be93d43b-8fea-456a-9442-6ce3961bfcdb {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1130.515036] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Successfully updated port: be93d43b-8fea-456a-9442-6ce3961bfcdb {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1130.523897] env[67008]: DEBUG nova.compute.manager [req-6009cca9-bf77-43bd-a09c-6d356c99277c req-a4af3357-e1c9-4857-b9c6-76e4b4f2e235 service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Received event network-vif-plugged-be93d43b-8fea-456a-9442-6ce3961bfcdb {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1130.524153] env[67008]: DEBUG oslo_concurrency.lockutils [req-6009cca9-bf77-43bd-a09c-6d356c99277c 
req-a4af3357-e1c9-4857-b9c6-76e4b4f2e235 service nova] Acquiring lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1130.524373] env[67008]: DEBUG oslo_concurrency.lockutils [req-6009cca9-bf77-43bd-a09c-6d356c99277c req-a4af3357-e1c9-4857-b9c6-76e4b4f2e235 service nova] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1130.524542] env[67008]: DEBUG oslo_concurrency.lockutils [req-6009cca9-bf77-43bd-a09c-6d356c99277c req-a4af3357-e1c9-4857-b9c6-76e4b4f2e235 service nova] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1130.524706] env[67008]: DEBUG nova.compute.manager [req-6009cca9-bf77-43bd-a09c-6d356c99277c req-a4af3357-e1c9-4857-b9c6-76e4b4f2e235 service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] No waiting events found dispatching network-vif-plugged-be93d43b-8fea-456a-9442-6ce3961bfcdb {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1130.524870] env[67008]: WARNING nova.compute.manager [req-6009cca9-bf77-43bd-a09c-6d356c99277c req-a4af3357-e1c9-4857-b9c6-76e4b4f2e235 service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Received unexpected event network-vif-plugged-be93d43b-8fea-456a-9442-6ce3961bfcdb for instance with vm_state building and task_state spawning. [ 1130.526079] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "refresh_cache-81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1130.526221] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquired lock "refresh_cache-81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1130.526361] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1130.603489] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1131.030952] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Updating instance_info_cache with network_info: [{"id": "be93d43b-8fea-456a-9442-6ce3961bfcdb", "address": "fa:16:3e:29:a3:ed", "network": {"id": "97a58486-5c16-459b-82d5-58bd9dd82fa9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1397290558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed062493109b4e2e8d86864d605c2af3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe93d43b-8f", "ovs_interfaceid": "be93d43b-8fea-456a-9442-6ce3961bfcdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.042735] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Releasing lock "refresh_cache-81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1131.043033] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance network_info: |[{"id": "be93d43b-8fea-456a-9442-6ce3961bfcdb", "address": "fa:16:3e:29:a3:ed", "network": {"id": "97a58486-5c16-459b-82d5-58bd9dd82fa9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1397290558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed062493109b4e2e8d86864d605c2af3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe93d43b-8f", "ovs_interfaceid": "be93d43b-8fea-456a-9442-6ce3961bfcdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1131.043423] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:a3:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be93d43b-8fea-456a-9442-6ce3961bfcdb', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1131.050856] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Creating folder: Project (ed062493109b4e2e8d86864d605c2af3). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1131.051570] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d635d13a-ba2b-4dbf-bc44-ef77d612e6b5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.062210] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Created folder: Project (ed062493109b4e2e8d86864d605c2af3) in parent group-v567993. [ 1131.062388] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Creating folder: Instances. Parent ref: group-v568054. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1131.062599] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85bb4e2b-6f26-4f06-b575-7c3d17354cd4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.071776] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Created folder: Instances in parent group-v568054. [ 1131.071995] env[67008]: DEBUG oslo.service.loopingcall [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1131.072185] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1131.072374] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d8c53e2-c113-4c6c-8953-2a34a04a657a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.091433] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1131.091433] env[67008]: value = "task-2824925" [ 1131.091433] env[67008]: _type = "Task" [ 1131.091433] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.098627] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824925, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.601992] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824925, 'name': CreateVM_Task, 'duration_secs': 0.312035} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.602193] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1131.602865] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1131.603042] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1131.603377] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1131.603624] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9b96513-d097-4156-bfcc-3c24f5bc7c97 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.608063] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Waiting for the task: (returnval){ [ 1131.608063] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d83722-3b7a-b6c3-eb91-3405bdbc3597" [ 1131.608063] env[67008]: _type = "Task" [ 1131.608063] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.615137] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d83722-3b7a-b6c3-eb91-3405bdbc3597, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.122359] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1132.122747] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1132.123101] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1132.565961] env[67008]: DEBUG nova.compute.manager [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Received event network-changed-be93d43b-8fea-456a-9442-6ce3961bfcdb {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1132.566181] env[67008]: DEBUG nova.compute.manager [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Refreshing instance network info cache due to event network-changed-be93d43b-8fea-456a-9442-6ce3961bfcdb. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1132.566404] env[67008]: DEBUG oslo_concurrency.lockutils [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] Acquiring lock "refresh_cache-81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1132.566550] env[67008]: DEBUG oslo_concurrency.lockutils [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] Acquired lock "refresh_cache-81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1132.566695] env[67008]: DEBUG nova.network.neutron [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Refreshing network info cache for port be93d43b-8fea-456a-9442-6ce3961bfcdb {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1132.967239] env[67008]: DEBUG nova.network.neutron [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Updated VIF entry in instance network info cache for port be93d43b-8fea-456a-9442-6ce3961bfcdb. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1132.967643] env[67008]: DEBUG nova.network.neutron [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Updating instance_info_cache with network_info: [{"id": "be93d43b-8fea-456a-9442-6ce3961bfcdb", "address": "fa:16:3e:29:a3:ed", "network": {"id": "97a58486-5c16-459b-82d5-58bd9dd82fa9", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1397290558-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ed062493109b4e2e8d86864d605c2af3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe93d43b-8f", "ovs_interfaceid": "be93d43b-8fea-456a-9442-6ce3961bfcdb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.976457] env[67008]: DEBUG oslo_concurrency.lockutils [req-5d368f6f-b0f9-4b00-b8db-2eed49c9cbd5 req-9e2f5f31-0c94-45fe-ab3e-abf4efc9cace service nova] Releasing lock "refresh_cache-81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1135.857382] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.868642] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1135.868857] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1135.869039] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1135.869221] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1135.870312] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff37b07f-0ee6-44e4-9ee1-887a8e2fd16e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.879684] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd193d17-d192-4e88-b6e8-9b55e9dc88ad {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.894794] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942e0192-01be-427c-a105-ffc56d27c8c9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.901634] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45008a08-e2aa-470c-92a3-7d7666a9d653 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.931381] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181070MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1135.931543] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1135.931743] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1136.072348] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.072521] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.072651] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.072774] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.072895] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.073020] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.073139] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.073257] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.073370] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.073484] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1136.086545] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.099556] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6d1218c7-1e36-4276-9675-5e15407cbc33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.109661] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 316742af-0ca9-4695-8216-5c7067e27d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.118678] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 91f762b8-dbf7-4c6f-b07d-5989da743a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.128059] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba9efdd-6e4d-488e-aa38-815f01c4b571 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.136990] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 33e54452-17bb-4141-856a-7e19e2e60dbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.145831] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 07abda77-2e28-4bac-a36b-dc837208c28f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.154900] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.163472] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b976b36c-2847-483a-babb-77e58cdf3932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.172611] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.181647] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.190873] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e3812a2c-b59a-48cb-90b5-0b185351d3b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.199700] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fad05213-ac21-499c-b7fb-1929e9b3fca5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.208646] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9617d5d7-7977-475c-8375-1a59ed302444 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.218896] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0a52230a-d439-45dd-a908-bd698f94e841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.229550] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
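The audit above walks every instance known to this host and classifies its placement allocation: actively managed instances keep theirs, while instances that were scheduled here but have not started are skipped rather than healed. Every allocation is the same {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} (the small tempest flavor). A hedged sketch of that bookkeeping; the data model is deliberately simplified, and only the classification and summing mirror the log.

```python
# Illustrative only: sums per-instance placement allocations the way the
# resource tracker audit above reports them. The (uuid, scheduled_only)
# tuples are a simplification of Nova's real instance objects.
from collections import Counter

ALLOC = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}  # tempest flavor, from the log

def audit(instances):
    used = Counter()
    for uuid, scheduled_only in instances:
        if scheduled_only:
            continue  # "scheduled ... yet to start": skip heal of allocation
        used.update(ALLOC)  # "actively managed ... has allocations in placement"
    return dict(used)

managed = [('f5fce891-...', False)] * 10  # the 10 actively managed instances
print(audit(managed))  # {'DISK_GB': 10, 'MEMORY_MB': 1280, 'VCPU': 10}
```

These sums line up with the final view reported just below: used_vcpus=10, used_disk=10GB, and used_ram=1792MB, i.e. the 1280MB of instance allocations plus the 512MB of reserved memory shown in the inventory.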
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1136.229783] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1136.229931] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1136.617843] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175364cd-e79b-46d8-b74b-e2d295568d63 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.627288] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdba985-9461-4f6f-9b9b-902139fb834b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.657206] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772b9074-2b84-4922-b154-e22c088d9bc2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.664335] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b564f838-4229-44c1-8712-6dd85943b382 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.677386] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.686688] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1136.702438] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1136.702637] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.771s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1137.702883] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.702883] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.857177] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.810694] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1138.811057] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1138.869886] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.856140] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.856409] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.856556] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
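The burst of "Running periodic task ComputeManager.*" lines comes from oslo.service's periodic-task machinery: manager methods are decorated as periodic tasks and a single loop dispatches whichever are due, which is why unrelated tasks (_poll_unconfirmed_resizes, _instance_usage_audit, _reclaim_queued_deletes, ...) fire back to back under one request id. _reclaim_queued_deletes exits immediately because CONF.reclaim_instance_interval <= 0. A minimal sketch of the decorator pattern; the manager class and task bodies are hypothetical, only the decorator API is oslo.service's.

```python
# Minimal sketch of the oslo.service periodic-task pattern that produces the
# "Running periodic task ..." lines above. Manager and its bodies are
# hypothetical stand-ins for nova.compute.manager.ComputeManager.
from oslo_service import periodic_task

RECLAIM_INSTANCE_INTERVAL = 0  # stand-in for Nova's CONF.reclaim_instance_interval

class Manager(periodic_task.PeriodicTasks):

    @periodic_task.periodic_task(spacing=60)
    def _poll_rebooting_instances(self, context):
        pass  # hypothetical body

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        if RECLAIM_INSTANCE_INTERVAL <= 0:
            return  # same "skipping..." early exit as logged above
```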
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1140.851766] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.856401] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.856740] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1140.856886] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1140.879375] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.879375] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.879576] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.879656] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.879771] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.879842] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.879966] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.880100] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.880229] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.880393] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1140.880510] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1141.857891] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.857891] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances with incomplete migration {{(pid=67008) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1142.866543] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.857629] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.857933] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1144.867327] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] There are 0 instances to clean {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1146.861716] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.981634] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.001944] env[67008]: DEBUG nova.virt.vmwareapi.vmops 
[None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Getting list of instances from cluster (obj){ [ 1160.001944] env[67008]: value = "domain-c8" [ 1160.001944] env[67008]: _type = "ClusterComputeResource" [ 1160.001944] env[67008]: } {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1160.003195] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe1bc77-f24d-471a-a5f4-199694cea1fa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.020163] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Got total of 10 instances {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1160.020317] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid f5fce891-3c35-415e-9d09-c5c8dca3dde3 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.020504] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 8b645fe3-0a5d-4f12-a99d-1f0580432d59 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.020663] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.020845] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.021000] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 95604dd8-b797-440e-a844-af44609faa61 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.021175] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.021324] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.021471] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.021616] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 3c10ac79-441a-467c-a3aa-fdb9a9451698 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.021758] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1160.022062] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.022289] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.022497] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.022687] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.022877] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "95604dd8-b797-440e-a844-af44609faa61" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.023080] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.023278] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.023468] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.023657] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1160.023846] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1174.975011] env[67008]: WARNING oslo_vmware.rw_handles [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1174.975011] env[67008]: ERROR oslo_vmware.rw_handles [ 1174.975582] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1174.977661] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1174.977955] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Copying Virtual Disk [datastore1] vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/1aee9c9e-83e4-4a2c-a3c9-3045698e2bf9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1174.978258] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-320327b0-7130-4b99-8d09-18fc0b4e472b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.985894] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 
tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Waiting for the task: (returnval){ [ 1174.985894] env[67008]: value = "task-2824926" [ 1174.985894] env[67008]: _type = "Task" [ 1174.985894] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.993910] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Task: {'id': task-2824926, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.496244] env[67008]: DEBUG oslo_vmware.exceptions [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1175.496541] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1175.497161] env[67008]: ERROR nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1175.497161] env[67008]: Faults: ['InvalidArgument'] [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Traceback (most recent call last): [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] yield resources [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self.driver.spawn(context, instance, image_meta, [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self._fetch_image_if_missing(context, vi) [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", 
line 639, in _fetch_image_if_missing [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] image_cache(vi, tmp_image_ds_loc) [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] vm_util.copy_virtual_disk( [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] session._wait_for_task(vmdk_copy_task) [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] return self.wait_for_task(task_ref) [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] return evt.wait() [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] result = hub.switch() [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] return self.greenlet.switch() [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self.f(*self.args, **self.kw) [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] raise exceptions.translate_fault(task_info.error) [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Faults: ['InvalidArgument'] [ 1175.497161] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] [ 1175.498083] env[67008]: INFO nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Terminating instance [ 
1175.499302] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1175.499302] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1175.499528] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87358ac7-83ae-4e7d-bfff-71e61627df51 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.501791] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1175.501979] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1175.502688] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b1a615-25de-4f33-9885-b390f11e0648 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.509567] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1175.510487] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad760c82-600c-4a15-b475-092f9d644e94 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.511787] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1175.511957] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Folder [datastore1] devstack-image-cache_base created. 
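This stretch is the visible failure chain for instance f5fce891: the image upload at 1174.975 was cut off (RemoteDisconnected surfaced while closing the write handle), the driver still treated the sparse vmdk as downloaded and tried to copy it into the image cache, vCenter rejected the CopyVirtualDisk_Task with InvalidArgument on fileType (plausibly because the truncated upload left an invalid disk, though the log does not say so explicitly), the spawn aborted, and the instance is now being unregistered and deleted, while a second request (req-1fafbfef) independently recreates the image-cache directory for its own retry. The traceback shows where the fault surfaces: wait_for_task() polls the task and translates the error. Below is a simplified reconstruction of that poll/translate step; get_task_info and the exception class are stand-ins, and only the control flow mirrors the traceback.

```python
# Illustrative reconstruction of the wait/poll pattern in the traceback
# above. get_task_info is a hypothetical callable standing in for the
# PropertyCollector query oslo.vmware performs; the error branch is the
# step that raised "A specified parameter was not correct: fileType /
# Faults: ['InvalidArgument']".
import time

class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    while True:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # analogous to: raise exceptions.translate_fault(task_info.error)
            raise VimFaultException(info['error_msg'], info['fault_list'])
        time.sleep(poll_interval)  # progress is re-logged on each poll
```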
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1175.512613] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-819e7e78-9fed-4854-adf6-8a4e764d4f61 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.517990] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Waiting for the task: (returnval){ [ 1175.517990] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]523c2087-699f-cd8c-ec49-1f1f185aad0b" [ 1175.517990] env[67008]: _type = "Task" [ 1175.517990] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.525989] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]523c2087-699f-cd8c-ec49-1f1f185aad0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.581629] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1175.581863] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1175.582076] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Deleting the datastore file [datastore1] f5fce891-3c35-415e-9d09-c5c8dca3dde3 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.582345] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe0400b7-5e40-43b3-8b22-7c549c89206a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.588575] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Waiting for the task: (returnval){ [ 1175.588575] env[67008]: value = "task-2824928" [ 1175.588575] env[67008]: _type = "Task" [ 1175.588575] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.597351] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Task: {'id': task-2824928, 'name': DeleteDatastoreFile_Task} progress is 0%. 
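Note the two shapes of task identifiers in these records: CopyVirtualDisk_Task and DeleteDatastoreFile_Task get global "task-NNNNNNN" ids, while SearchDatastore_Task returns a session-scoped id prefixed with the session moref ("session[...]"). A tiny classifier, purely as a log-reading aid; the patterns are taken from the ids appearing in this log, not from any vSphere specification.

```python
# Classify the two task-id shapes seen in this log: global ids like
# "task-2824928" and session-scoped ids like "session[5211c85d-...]523c2087-...".
import re

TASK_RE = re.compile(r"task-\d+")
SESSION_TASK_RE = re.compile(r"session\[[0-9a-f-]+\][0-9a-f-]+")

def classify(task_id):
    if TASK_RE.fullmatch(task_id):
        return 'global'
    if SESSION_TASK_RE.fullmatch(task_id):
        return 'session-scoped'
    return 'unknown'

print(classify('task-2824928'))  # global
print(classify('session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]'
               '523c2087-699f-cd8c-ec49-1f1f185aad0b'))  # session-scoped
```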
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.029012] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1176.029356] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Creating directory with path [datastore1] vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.029692] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-215e11f6-b91d-4058-ba9a-5051496ae10a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.041204] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Created directory with path [datastore1] vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.041402] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Fetch image to [datastore1] vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1176.041571] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1176.042394] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55ef65c-1941-48f1-8087-20164772bf08 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.049269] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891b6ff4-c27a-4ea9-990b-cb734f137059 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.058214] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc994c16-0c82-4a74-b8be-9beb90f3f7e1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.089183] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1358f482-f1fe-4539-9a46-021ff5a0fa3b {{(pid=67008) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.101037] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c2998900-61d1-40a8-80f7-22958fde7d25 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.101886] env[67008]: DEBUG oslo_vmware.api [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Task: {'id': task-2824928, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074498} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.102164] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.102344] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1176.102494] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1176.102664] env[67008]: INFO nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Took 0.60 seconds to destroy the instance on the hypervisor. 
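DeleteDatastoreFile_Task completes in about 0.07s and the teardown finishes ("Took 0.60 seconds to destroy the instance on the hypervisor"). Throughout this stretch two request contexts interleave record by record: the teardown under req-7b225d3c and the image re-fetch under req-1fafbfef. Grouping records by request id makes each flow readable on its own; a small post-processing sketch, plain log parsing with no Nova APIs involved.

```python
# Group log records by the first request id (req-<uuid>) on each line, to
# untangle interleaved flows like the two visible above. Lines without a
# request id (e.g. bare oslo_vmware.service records) land in one bucket.
import re
from collections import defaultdict

REQ_RE = re.compile(r"req-[0-9a-f]{8}(?:-[0-9a-f]{4}){3}-[0-9a-f]{12}")

def split_by_request(lines):
    flows = defaultdict(list)
    for line in lines:
        m = REQ_RE.search(line)
        flows[m.group(0) if m else '(no request)'].append(line)
    return flows

# usage: flows = split_by_request(open('nova-compute.log'))
```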
[ 1176.104745] env[67008]: DEBUG nova.compute.claims [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1176.104977] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1176.105220] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1176.123053] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1176.166692] env[67008]: DEBUG nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Refreshing inventories for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1176.172629] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1176.228668] env[67008]: DEBUG nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Updating ProviderTree inventory for provider ad100a41-192a-4a03-bdd9-0a78ce856705 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1176.228917] env[67008]: DEBUG nova.compute.provider_tree [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.233055] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1176.233231] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1176.242744] env[67008]: DEBUG nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Refreshing aggregate associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, aggregates: None {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1176.262335] env[67008]: DEBUG nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Refreshing trait associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1176.540306] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a3412f-871f-41e0-b1fe-afe8f91c7331 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.547791] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2765250f-311a-4f82-ac10-e2760bd479fc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.578144] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7c6016-ab37-43a9-a151-67b271f357f7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.584993] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d37e1dd-b5a0-4177-b70e-232fc9e21028 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.597462] env[67008]: DEBUG nova.compute.provider_tree [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.607838] env[67008]: DEBUG nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1176.622724] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.517s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1176.623290] env[67008]: ERROR nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1176.623290] env[67008]: Faults: ['InvalidArgument'] [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Traceback (most recent call last): [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self.driver.spawn(context, instance, image_meta, [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self._fetch_image_if_missing(context, vi) [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] image_cache(vi, tmp_image_ds_loc) [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] vm_util.copy_virtual_disk( [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] session._wait_for_task(vmdk_copy_task) [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] return self.wait_for_task(task_ref) [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] return evt.wait() [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] result = hub.switch() [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] return self.greenlet.switch() [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] self.f(*self.args, **self.kw) [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] raise exceptions.translate_fault(task_info.error) [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Faults: ['InvalidArgument'] [ 1176.623290] env[67008]: ERROR nova.compute.manager [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] [ 1176.624456] env[67008]: DEBUG nova.compute.utils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1176.625316] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Build of instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 was re-scheduled: A specified parameter was not correct: fileType [ 1176.625316] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1176.625677] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1176.625847] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1176.626023] env[67008]: DEBUG nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1176.626192] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1177.073724] env[67008]: DEBUG nova.network.neutron [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.086013] env[67008]: INFO nova.compute.manager [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Took 0.46 seconds to deallocate network for instance. [ 1177.189572] env[67008]: INFO nova.scheduler.client.report [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Deleted allocations for instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 [ 1177.213168] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7b225d3c-9d20-4292-8a25-52aee4cd8eb4 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 567.684s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.214388] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 369.472s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.214660] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Acquiring lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1177.214910] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.215095] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.217532] env[67008]: INFO nova.compute.manager [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Terminating instance [ 1177.219161] env[67008]: DEBUG nova.compute.manager [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1177.219341] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1177.219825] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e754ddb-014c-4c10-91c4-a1da8086cd5a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.224579] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1177.230948] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecca527b-aee7-4b3f-8953-10f7234ae424 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.261735] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5fce891-3c35-415e-9d09-c5c8dca3dde3 could not be found. 
[ 1177.261735] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1177.261735] env[67008]: INFO nova.compute.manager [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1177.261943] env[67008]: DEBUG oslo.service.loopingcall [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1177.264118] env[67008]: DEBUG nova.compute.manager [-] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1177.264219] env[67008]: DEBUG nova.network.neutron [-] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1177.279142] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1177.279381] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.280892] env[67008]: INFO nova.compute.claims [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1177.297629] env[67008]: DEBUG nova.network.neutron [-] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.315192] env[67008]: INFO nova.compute.manager [-] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] Took 0.05 seconds to deallocate network for instance. 
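The inventory payloads repeated throughout this log are interpreted by placement as effective capacity = (total - reserved) * allocation_ratio per resource class, with max_unit capping any single allocation. Applying that to the provider ad100a41-192a-4a03-bdd9-0a78ce856705 records above:

    # Effective capacity as placement computes it from the logged inventory.
    def effective_capacity(inv):
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    inventory = {  # values copied from the provider records in this log
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 171, 'step_size': 1,
                    'allocation_ratio': 1.0},
    }
    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
    # max_unit still caps one allocation at 16 VCPU / 65530 MB / 171 GB.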
[ 1177.405779] env[67008]: DEBUG oslo_concurrency.lockutils [None req-8716f330-95ef-4212-ac98-fb5874e0cea1 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.406659] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.385s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1177.406867] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: f5fce891-3c35-415e-9d09-c5c8dca3dde3] During sync_power_state the instance has a pending task (deleting). Skip. [ 1177.407052] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "f5fce891-3c35-415e-9d09-c5c8dca3dde3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.645609] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bf2940-8994-4a3e-ab06-e15bc6acecda {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.653400] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a3b3ea-2f16-47be-90b3-bd0f7ad98474 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.683418] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b460bd-ca6a-4351-a88b-fb4ef85e5359 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.690751] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3921578c-cddb-4c45-9701-72070a9a0180 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.704672] env[67008]: DEBUG nova.compute.provider_tree [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.714979] env[67008]: DEBUG nova.scheduler.client.report [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1177.728111] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.449s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1177.728560] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1177.766763] env[67008]: DEBUG nova.compute.utils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1177.767947] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1177.768134] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1177.778165] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1177.822574] env[67008]: DEBUG nova.policy [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '00a4714cefa8439591f1c04b9633d2d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50523e61dfe8446d9dd72ab8e8fbd19c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1177.838411] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1177.862511] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1177.862796] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1177.862954] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1177.863149] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1177.863296] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1177.863440] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1177.863643] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1177.863799] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1177.863960] env[67008]: DEBUG nova.virt.hardware [None 
req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1177.864135] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1177.864308] env[67008]: DEBUG nova.virt.hardware [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1177.865342] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c351d798-2963-49d6-ae08-18c2c0721160 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.873085] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37926e1-cbeb-4e86-aad1-88927c7d8540 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.342559] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Successfully created port: 1e2b493a-301e-4c12-a786-f24792b9bed4 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1179.197085] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Successfully updated port: 1e2b493a-301e-4c12-a786-f24792b9bed4 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1179.208982] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "refresh_cache-94e8ddc5-d43c-49d5-93c6-f08081ed7643" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1179.209173] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired lock "refresh_cache-94e8ddc5-d43c-49d5-93c6-f08081ed7643" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1179.209308] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1179.253804] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 
tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1179.274838] env[67008]: DEBUG nova.compute.manager [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Received event network-vif-plugged-1e2b493a-301e-4c12-a786-f24792b9bed4 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1179.275072] env[67008]: DEBUG oslo_concurrency.lockutils [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] Acquiring lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1179.275280] env[67008]: DEBUG oslo_concurrency.lockutils [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1179.275446] env[67008]: DEBUG oslo_concurrency.lockutils [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1179.275608] env[67008]: DEBUG nova.compute.manager [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] No waiting events found dispatching network-vif-plugged-1e2b493a-301e-4c12-a786-f24792b9bed4 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1179.275768] env[67008]: WARNING nova.compute.manager [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Received unexpected event network-vif-plugged-1e2b493a-301e-4c12-a786-f24792b9bed4 for instance with vm_state building and task_state spawning. [ 1179.275923] env[67008]: DEBUG nova.compute.manager [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Received event network-changed-1e2b493a-301e-4c12-a786-f24792b9bed4 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1179.276080] env[67008]: DEBUG nova.compute.manager [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Refreshing instance network info cache due to event network-changed-1e2b493a-301e-4c12-a786-f24792b9bed4. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1179.276240] env[67008]: DEBUG oslo_concurrency.lockutils [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] Acquiring lock "refresh_cache-94e8ddc5-d43c-49d5-93c6-f08081ed7643" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1179.536984] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Updating instance_info_cache with network_info: [{"id": "1e2b493a-301e-4c12-a786-f24792b9bed4", "address": "fa:16:3e:b0:a9:92", "network": {"id": "324f2653-1ff3-4547-8dee-fbdad1adff80", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-464980593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50523e61dfe8446d9dd72ab8e8fbd19c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "113aa98d-90ca-43bc-a534-8908d1ec7d15", "external-id": "nsx-vlan-transportzone-186", "segmentation_id": 186, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e2b493a-30", "ovs_interfaceid": "1e2b493a-301e-4c12-a786-f24792b9bed4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.549598] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Releasing lock "refresh_cache-94e8ddc5-d43c-49d5-93c6-f08081ed7643" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1179.550100] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance network_info: |[{"id": "1e2b493a-301e-4c12-a786-f24792b9bed4", "address": "fa:16:3e:b0:a9:92", "network": {"id": "324f2653-1ff3-4547-8dee-fbdad1adff80", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-464980593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50523e61dfe8446d9dd72ab8e8fbd19c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "113aa98d-90ca-43bc-a534-8908d1ec7d15", "external-id": "nsx-vlan-transportzone-186", "segmentation_id": 186, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap1e2b493a-30", "ovs_interfaceid": "1e2b493a-301e-4c12-a786-f24792b9bed4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1179.550437] env[67008]: DEBUG oslo_concurrency.lockutils [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] Acquired lock "refresh_cache-94e8ddc5-d43c-49d5-93c6-f08081ed7643" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1179.550626] env[67008]: DEBUG nova.network.neutron [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Refreshing network info cache for port 1e2b493a-301e-4c12-a786-f24792b9bed4 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1179.552212] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:a9:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '113aa98d-90ca-43bc-a534-8908d1ec7d15', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e2b493a-301e-4c12-a786-f24792b9bed4', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1179.560194] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating folder: Project (50523e61dfe8446d9dd72ab8e8fbd19c). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1179.563050] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c4182c1-4d26-4dd5-bffd-1224b8647bcc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.573685] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Created folder: Project (50523e61dfe8446d9dd72ab8e8fbd19c) in parent group-v567993. [ 1179.573876] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating folder: Instances. Parent ref: group-v568057. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1179.574121] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75044ddd-58cb-4e17-845e-aaaf9bd5a2e1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.582871] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Created folder: Instances in parent group-v568057. 
[ 1179.583116] env[67008]: DEBUG oslo.service.loopingcall [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1179.583296] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1179.583492] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47676db1-0c0a-42d8-bb34-2431af514a80 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.602256] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1179.602256] env[67008]: value = "task-2824931" [ 1179.602256] env[67008]: _type = "Task" [ 1179.602256] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.609553] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824931, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.112190] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824931, 'name': CreateVM_Task, 'duration_secs': 0.303705} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.112400] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1180.112996] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1180.113269] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1180.113480] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1180.113723] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a696fc0-c5d4-42f5-9a68-699d6e345815 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.117917] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the 
task: (returnval){ [ 1180.117917] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]526fa78a-abcf-a74b-3c38-62955ac606f8" [ 1180.117917] env[67008]: _type = "Task" [ 1180.117917] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.125141] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]526fa78a-abcf-a74b-3c38-62955ac606f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.193208] env[67008]: DEBUG nova.network.neutron [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Updated VIF entry in instance network info cache for port 1e2b493a-301e-4c12-a786-f24792b9bed4. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1180.193570] env[67008]: DEBUG nova.network.neutron [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Updating instance_info_cache with network_info: [{"id": "1e2b493a-301e-4c12-a786-f24792b9bed4", "address": "fa:16:3e:b0:a9:92", "network": {"id": "324f2653-1ff3-4547-8dee-fbdad1adff80", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-464980593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50523e61dfe8446d9dd72ab8e8fbd19c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "113aa98d-90ca-43bc-a534-8908d1ec7d15", "external-id": "nsx-vlan-transportzone-186", "segmentation_id": 186, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e2b493a-30", "ovs_interfaceid": "1e2b493a-301e-4c12-a786-f24792b9bed4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.202655] env[67008]: DEBUG oslo_concurrency.lockutils [req-89b5e6d1-c596-4d5b-9eb7-949738cc2926 req-5a238a51-6d3e-4a2b-ab63-8f8930c2417f service nova] Releasing lock "refresh_cache-94e8ddc5-d43c-49d5-93c6-f08081ed7643" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1180.628211] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1180.628464] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 
tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1180.628697] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1195.857279] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.869288] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1195.869503] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1195.869679] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1195.869851] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1195.871099] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3b951b-516f-4538-8c80-cb4063c65502 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.879986] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ad95f4-4dbd-4e31-8e64-7eb5d9b4f33f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.893574] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564cbacd-9281-45c6-b814-cd6138227433 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.899505] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4f240a-3d92-47ca-8c7d-69b43d62b00b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.928610] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181066MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1195.928735] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1195.928949] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1196.003546] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.003721] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.003851] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.003978] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.004112] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.004234] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.004352] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.004467] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.004579] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.004692] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1196.017365] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6d1218c7-1e36-4276-9675-5e15407cbc33 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.027643] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 316742af-0ca9-4695-8216-5c7067e27d7c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.038431] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 91f762b8-dbf7-4c6f-b07d-5989da743a88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.048654] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba9efdd-6e4d-488e-aa38-815f01c4b571 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.058015] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 33e54452-17bb-4141-856a-7e19e2e60dbf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.067355] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 07abda77-2e28-4bac-a36b-dc837208c28f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.077500] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.086985] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b976b36c-2847-483a-babb-77e58cdf3932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.096349] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.105877] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.115861] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e3812a2c-b59a-48cb-90b5-0b185351d3b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.126127] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fad05213-ac21-499c-b7fb-1929e9b3fca5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.135862] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9617d5d7-7977-475c-8375-1a59ed302444 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.145389] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0a52230a-d439-45dd-a908-bd698f94e841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.155299] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1196.165907] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1196.165907] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1196.166104] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1196.458041] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad33dbe4-5de0-4b73-8210-103be349fff2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1196.465691] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a009188e-f9e1-4783-a82a-ca95def51f11 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1196.495692] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257b11a6-c29f-4144-ac2e-957f8205fb88 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1196.502544] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24c8739-d17a-4333-80b3-8424ae4219a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1196.515236] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1196.523485] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1196.536760] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1196.536964] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.608s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
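The inventory payload logged above is the provider's capacity as placement sees it; the effective schedulable amount per resource class is (total - reserved) * allocation_ratio, which is how 48 physical VCPUs back 10 allocated ones with room to spare. A short worked sketch using the exact values from the log (the capacity rule is standard placement arithmetic, not nova code):

# Inventory exactly as reported for provider ad100a41-192a-4a03-bdd9-0a78ce856705.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 171,
                "step_size": 1, "allocation_ratio": 1.0},
}

def capacity(inv):
    # Effective schedulable capacity: (total - reserved) * allocation_ratio.
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    print(rc, capacity(inv))  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0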
[ 1198.536694] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1198.857104] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1199.857980] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1200.852068] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1200.856449] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1200.856632] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1200.856772] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}}
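The burst of "Running periodic task ..." lines is the periodic-task loop firing each registered ComputeManager task in turn; a task can also decide to do nothing, as _reclaim_queued_deletes does here when its interval is configured off. A minimal stdlib sketch of the re-arming loop (the real machinery is oslo_service.periodic_task; everything below is illustrative):

import sched
import time

scheduler = sched.scheduler(time.monotonic, time.sleep)

def register_periodic(name, spacing, fn):
    def wrapper():
        print(f"Running periodic task {name}")
        fn()
        scheduler.enter(spacing, 0, wrapper)  # re-arm after each run
    scheduler.enter(spacing, 0, wrapper)

reclaim_instance_interval = 0  # mirrors the CONF.reclaim_instance_interval guard

def reclaim_queued_deletes():
    if reclaim_instance_interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")

register_periodic("_reclaim_queued_deletes", 60, reclaim_queued_deletes)
# scheduler.run()  # blocks; a service would run this loop in its own thread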
[ 1200.865469] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1201.628923] env[67008]: DEBUG oslo_concurrency.lockutils [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1202.857229] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1202.857493] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}}
[ 1202.857532] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}}
[ 1202.878044] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878208] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878269] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878379] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878502] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878623] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878742] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.878860] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.879011] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.879170] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1202.879264] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}}
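The heal pass above rebuilds its candidate list and drops every instance still in a Building state, which in this sample empties the list entirely. A tiny sketch of that filter (the data shapes are hypothetical):

BUILDING = "building"

def instances_to_heal(instances):
    for inst in instances:
        if inst["vm_state"] == BUILDING:
            print(f"[instance: {inst['uuid']}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        yield inst

instances = [{"uuid": "8b645fe3-0a5d-4f12-a99d-1f0580432d59", "vm_state": BUILDING}]
if not list(instances_to_heal(instances)):
    print("Didn't find any instances for network info cache update.")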
[ 1203.858080] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1210.550149] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1210.550645] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1222.772825] env[67008]: WARNING oslo_vmware.rw_handles [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles response.begin()
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1222.772825] env[67008]: ERROR oslo_vmware.rw_handles
[ 1222.773430] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1222.775531] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1222.775810] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Copying Virtual Disk [datastore1] vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/c663e289-bd1b-484d-8f4e-95c00eec2f64/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1222.776144] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb3a48ae-e573-48d7-a704-6da572f476f7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1222.785574] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Waiting for the task: (returnval){
[ 1222.785574] env[67008]: value = "task-2824932"
[ 1222.785574] env[67008]: _type = "Task"
[ 1222.785574] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1222.793348] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Task: {'id': task-2824932, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
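The (returnval){ ... } block and the "progress is 0%" line above are the two halves of the wait-for-task idiom: a vCenter call returns a Task reference immediately, and the client then polls it until it reports success or error. A generic polling sketch (not the oslo.vmware implementation; poll() and its states are assumptions):

import time

def wait_for_task(poll, interval=0.5, timeout=300):
    # poll() -> (state, info), e.g. ("running", "progress is 0%").
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, info = poll()
        if state == "success":
            return info
        if state == "error":
            raise RuntimeError(info)  # surfaces faults such as InvalidArgument
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

# Example: a task that succeeds on the third poll.
states = iter([("running", "0%"), ("running", "50%"), ("success", "done")])
print(wait_for_task(lambda: next(states), interval=0))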
[ 1223.296686] env[67008]: DEBUG oslo_vmware.exceptions [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1223.296979] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 1223.297542] env[67008]: ERROR nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1223.297542] env[67008]: Faults: ['InvalidArgument']
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last):
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] yield resources
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.driver.spawn(context, instance, image_meta,
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._fetch_image_if_missing(context, vi)
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] image_cache(vi, tmp_image_ds_loc)
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] vm_util.copy_virtual_disk(
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] session._wait_for_task(vmdk_copy_task)
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.wait_for_task(task_ref)
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return evt.wait()
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] result = hub.switch()
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.greenlet.switch()
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.f(*self.args, **self.kw)
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise exceptions.translate_fault(task_info.error)
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Faults: ['InvalidArgument']
[ 1223.297542] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59]
[ 1223.298576] env[67008]: INFO nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Terminating instance
[ 1223.299482] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 1223.299758] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
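The "Fault InvalidArgument not matched" line shows the fault-translation step: the fault name from the failed task is looked up against known exception classes, and anything unmatched falls back to the generic VimFaultException that then appears in the traceback. A sketch of that lookup in the spirit of get_fault_class/translate_fault (illustrative classes and table, not the oslo.vmware source):

class VimFaultException(Exception):
    # Generic carrier for faults that have no dedicated exception class.
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list

_FAULT_CLASSES = {}  # known fault-name -> exception-class mappings would go here

def translate_fault(fault_name, msg):
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:
        # "Fault InvalidArgument not matched." -> fall back to the generic type.
        return VimFaultException([fault_name], msg)
    return cls(msg)

err = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(type(err).__name__, err.fault_list)  # VimFaultException ['InvalidArgument']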
[ 1223.300450] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}}
[ 1223.300668] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1223.300923] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf55a3ec-05a7-4955-b43a-68de8d98e57a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.303835] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02b0831-d85b-43ac-bbfd-0a825d234ccc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.310752] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1223.311768] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2c2710b-ffea-4d76-aa0a-06736ed7cdbf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.313190] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1223.313369] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1223.314043] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6019fc4a-d0f8-4b41-850a-7923d0c53836 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.320555] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Waiting for the task: (returnval){
[ 1223.320555] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52044c01-8672-44fc-b34b-ffe54f842945"
[ 1223.320555] env[67008]: _type = "Task"
[ 1223.320555] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1223.327386] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52044c01-8672-44fc-b34b-ffe54f842945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1223.380056] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1223.380304] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1223.380486] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Deleting the datastore file [datastore1] 8b645fe3-0a5d-4f12-a99d-1f0580432d59 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1223.380749] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b77c602-a079-404b-80a4-acdeb819af55 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.388180] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Waiting for the task: (returnval){
[ 1223.388180] env[67008]: value = "task-2824934"
[ 1223.388180] env[67008]: _type = "Task"
[ 1223.388180] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1223.394520] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Task: {'id': task-2824934, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1223.831261] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1223.831651] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Creating directory with path [datastore1] vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1223.831781] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c94d9ff-3798-4529-97f4-552f0d7ff623 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.844059] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Created directory with path [datastore1] vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1223.844306] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Fetch image to [datastore1] vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1223.844487] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1223.845293] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0a9f93-9cf8-4c92-92ae-e1b15f59c974 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.852706] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2120e13a-68dc-4b57-a4c2-3250b98104fe {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.862317] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daba8b3c-1716-4de4-b391-a374658b18a1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
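The fetch sequence above always stages a download in a fresh per-request directory before anything touches the shared cache: vmware_temp/<random uuid>/<image id>/tmp-sparse.vmdk, later copied to devstack-image-cache_base/<image id>/<image id>.vmdk. A path-only sketch of that layout (path shapes copied from the log; the helper itself is hypothetical, not nova's ds_util):

import posixpath
import uuid

def fetch_paths(datastore, image_id):
    tmp_dir = posixpath.join("vmware_temp", str(uuid.uuid4()), image_id)
    tmp_vmdk = posixpath.join(tmp_dir, "tmp-sparse.vmdk")        # staging copy
    cached_vmdk = posixpath.join("devstack-image-cache_base",    # shared cache
                                 image_id, image_id + ".vmdk")
    prefix = f"[{datastore}] "
    return prefix + tmp_vmdk, prefix + cached_vmdk

tmp, cached = fetch_paths("datastore1", "ae01aa56-93e6-47e6-accd-8c8a802d92bd")
print(tmp)
print(cached)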
[ 1223.898442] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bca6e17-0f2c-4b69-b421-15ab0830b9c9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.906419] env[67008]: DEBUG oslo_vmware.api [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Task: {'id': task-2824934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084837} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1223.908036] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1223.908285] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1223.908520] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1223.908734] env[67008]: INFO nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Took 0.61 seconds to destroy the instance on the hypervisor.
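The destroy path that just completed is a strict sequence: unregister the VM from the vCenter inventory, delete its datastore directory, then report the elapsed time. A hedged sketch of that shape (the two callables are placeholders, not nova's vmops API):

import time

def destroy_instance(unregister_vm, delete_datastore_dir, instance_uuid):
    t0 = time.monotonic()
    unregister_vm(instance_uuid)         # UnregisterVM: drop from inventory
    delete_datastore_dir(instance_uuid)  # DeleteDatastoreFile_Task: remove files
    print(f"Took {time.monotonic() - t0:.2f} seconds to destroy the instance "
          "on the hypervisor.")

destroy_instance(lambda u: None, lambda u: None,
                 "8b645fe3-0a5d-4f12-a99d-1f0580432d59")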
[ 1223.911041] env[67008]: DEBUG nova.compute.claims [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1223.911257] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1223.911683] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1223.914095] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2deaa979-ddd0-4581-8598-6d3fa019bdea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1223.937826] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1224.268703] env[67008]: DEBUG oslo_vmware.rw_handles [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1224.331525] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d992170-95de-4e66-93d4-6194935f3e94 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.339702] env[67008]: DEBUG oslo_vmware.rw_handles [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1224.339962] env[67008]: DEBUG oslo_vmware.rw_handles [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
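The write handle above is plain HTTPS: the image bytes are streamed with a PUT-style request to the datastore's /folder endpoint (with dcPath and dsName as query parameters), and the response is only read when the handle closes, which is exactly the step that raised RemoteDisconnected earlier in this log. An illustrative stdlib sketch; the real path goes through oslo_vmware.rw_handles with session cookies or service tickets:

import http.client

def upload_to_datastore(host, path, data: bytes):
    conn = http.client.HTTPSConnection(host, 443)
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(len(data)))
    conn.endheaders()
    conn.send(data)               # stream the VMDK bytes
    resp = conn.getresponse()     # deferred read; fails if the server hangs up
    conn.close()
    return resp.status

# upload_to_datastore(
#     "esx7c1n3.openstack.eu-de-1.cloud.sap",
#     "/folder/vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/"
#     "ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk"
#     "?dcPath=ha-datacenter&dsName=datastore1",
#     vmdk_bytes)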
[ 1224.343466] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e83f0dd-c8c9-45d5-ab1c-8039f3bf96c7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.375875] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6674b9-7eb6-4855-b463-fffdd9ceb4ca {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.383349] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23db7b0b-cf91-4388-a18a-35a8747a0b53 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1224.398360] env[67008]: DEBUG nova.compute.provider_tree [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1224.410572] env[67008]: DEBUG nova.scheduler.client.report [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1224.425312] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.514s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1224.425850] env[67008]: ERROR nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1224.425850] env[67008]: Faults: ['InvalidArgument']
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last):
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.driver.spawn(context, instance, image_meta,
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._fetch_image_if_missing(context, vi)
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] image_cache(vi, tmp_image_ds_loc)
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] vm_util.copy_virtual_disk(
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] session._wait_for_task(vmdk_copy_task)
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.wait_for_task(task_ref)
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return evt.wait()
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] result = hub.switch()
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.greenlet.switch()
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.f(*self.args, **self.kw)
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise exceptions.translate_fault(task_info.error)
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Faults: ['InvalidArgument']
[ 1224.425850] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59]
[ 1224.426992] env[67008]: DEBUG nova.compute.utils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1224.428433] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Build of instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 was re-scheduled: A specified parameter was not correct: fileType
[ 1224.428433] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}}
[ 1224.428926] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}}
[ 1224.429039] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}}
[ 1224.429207] env[67008]: DEBUG nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}}
[ 1224.429391] env[67008]: DEBUG nova.network.neutron [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1224.874175] env[67008]: DEBUG neutronclient.v2_0.client [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67008) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}}
[ 1224.874175] env[67008]: ERROR nova.compute.manager [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized.
[ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last): [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.driver.spawn(context, instance, image_meta, [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._fetch_image_if_missing(context, vi) [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] image_cache(vi, tmp_image_ds_loc) [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] vm_util.copy_virtual_disk( [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] session._wait_for_task(vmdk_copy_task) [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.wait_for_task(task_ref) [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return evt.wait() [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] result = hub.switch() [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.greenlet.switch() [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.f(*self.args, **self.kw) [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise exceptions.translate_fault(task_info.error) [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Faults: ['InvalidArgument'] [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] During handling of the above exception, another exception occurred: [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last): [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._build_and_run_instance(context, instance, image, [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 1224.874175] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise exception.RescheduledException( [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] nova.exception.RescheduledException: Build of instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 was re-scheduled: A specified parameter was not correct: fileType [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Faults: ['InvalidArgument'] [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] During handling of the above exception, another exception occurred: [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last): [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 
1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] exception_handler_v20(status_code, error_body) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise client_exc(message=error_message, [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Neutron server returns request_ids: ['req-7dba9f89-339d-4c95-b7ab-596f41418dac'] [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] During handling of the above exception, another exception occurred: [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last): [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._deallocate_network(context, instance, requested_networks) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.network_api.deallocate_for_instance( [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] data = neutron.list_ports(**search_opts) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.list('ports', self.ports_path, retrieve_all, [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 
8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] for r in self._pagination(collection, path, **params): [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] res = self.get(path, params=params) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.retry_request("GET", action, body=body, [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.875758] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.do_request(method, action, body=body, [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._handle_fault_response(status_code, replybody, resp) [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise exception.Unauthorized() [ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] nova.exception.Unauthorized: Not authorized. 
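(Editor's note) The repeated "wrapper" frames at nova/network/neutron.py:196 in the traceback above come from an exception-translation decorator: every neutronclient call is routed through it, and a low-level neutronclient.common.exceptions.Unauthorized (HTTP 401) is re-raised as a Nova-level exception (here nova.exception.Unauthorized, later NeutronAdminCredentialConfigurationInvalid). A minimal, self-contained Python sketch of that pattern follows; the names ClientUnauthorized, ServiceUnauthorized, translate_client_errors and the list_ports stub are hypothetical stand-ins, not Nova's actual classes.

    import functools

    class ClientUnauthorized(Exception):
        """Stands in for neutronclient.common.exceptions.Unauthorized."""

    class ServiceUnauthorized(Exception):
        """Stands in for nova.exception.Unauthorized."""

    def translate_client_errors(func):
        # Re-raise low-level client 401s as one stable service-level type,
        # so callers never depend on the client library's exceptions.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except ClientUnauthorized as exc:
                raise ServiceUnauthorized("Not authorized.") from exc
        return wrapper

    @translate_client_errors
    def list_ports(**search_opts):
        # Stub client call that always fails the way the log shows.
        raise ClientUnauthorized("401: The request you have made requires "
                                 "authentication.")

    try:
        list_ports(device_id="8b645fe3-0a5d-4f12-a99d-1f0580432d59")
    except ServiceUnauthorized as exc:
        print(f"translated: {exc} (caused by: {exc.__cause__})")

This also reproduces the "During handling of the above exception, another exception occurred" chaining seen in the log, because the wrapper raises from inside the handler of the client exception.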
[ 1224.877103] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1224.947652] env[67008]: INFO nova.scheduler.client.report [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Deleted allocations for instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 [ 1224.969081] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1fafbfef-9865-486d-bfc1-492bb538d32d tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 614.866s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1224.970387] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 416.619s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1224.970601] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Acquiring lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1224.970801] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1224.970963] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1224.972884] env[67008]: INFO nova.compute.manager [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Terminating instance [ 1224.974446] env[67008]: DEBUG nova.compute.manager [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Start destroying the instance on the hypervisor.
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1224.974666] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1224.975187] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bd9eed2-7965-4da3-a511-8d607a096e4d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.985041] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfe5372-6fb6-4fc8-ae6f-ae1f18f6cfc6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.995413] env[67008]: DEBUG nova.compute.manager [None req-432171d1-6acf-4170-bc52-90b986452db8 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: 6d1218c7-1e36-4276-9675-5e15407cbc33] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1225.015267] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8b645fe3-0a5d-4f12-a99d-1f0580432d59 could not be found. [ 1225.015466] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1225.015639] env[67008]: INFO nova.compute.manager [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1225.015886] env[67008]: DEBUG oslo.service.loopingcall [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1225.016123] env[67008]: DEBUG nova.compute.manager [-] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1225.016221] env[67008]: DEBUG nova.network.neutron [-] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1225.018451] env[67008]: DEBUG nova.compute.manager [None req-432171d1-6acf-4170-bc52-90b986452db8 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] [instance: 6d1218c7-1e36-4276-9675-5e15407cbc33] Instance disappeared before build.
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1225.038337] env[67008]: DEBUG oslo_concurrency.lockutils [None req-432171d1-6acf-4170-bc52-90b986452db8 tempest-AttachVolumeShelveTestJSON-1732525994 tempest-AttachVolumeShelveTestJSON-1732525994-project-member] Lock "6d1218c7-1e36-4276-9675-5e15407cbc33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.155s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.047384] env[67008]: DEBUG nova.compute.manager [None req-2b6c4913-8a44-4676-bb22-fb99a7e4f261 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: 91f762b8-dbf7-4c6f-b07d-5989da743a88] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1225.071439] env[67008]: DEBUG nova.compute.manager [None req-2b6c4913-8a44-4676-bb22-fb99a7e4f261 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: 91f762b8-dbf7-4c6f-b07d-5989da743a88] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1225.096502] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b6c4913-8a44-4676-bb22-fb99a7e4f261 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "91f762b8-dbf7-4c6f-b07d-5989da743a88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.525s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.108859] env[67008]: DEBUG nova.compute.manager [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] [instance: 316742af-0ca9-4695-8216-5c7067e27d7c] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1225.132398] env[67008]: DEBUG nova.compute.manager [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] [instance: 316742af-0ca9-4695-8216-5c7067e27d7c] Instance disappeared before build.
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1225.154204] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Lock "316742af-0ca9-4695-8216-5c7067e27d7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.442s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.163287] env[67008]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67008) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1225.163287] env[67008]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-89ef58d8-3a33-4a12-a8e2-9cf8270f602c'] [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1225.163882] env[67008]: ERROR
oslo.service.loopingcall result = f(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall 
File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1225.163882] env[67008]: ERROR oslo.service.loopingcall [ 1225.165832] env[67008]: ERROR nova.compute.manager [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1225.167534] env[67008]: DEBUG nova.compute.manager [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] [instance: dba9efdd-6e4d-488e-aa38-815f01c4b571] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1225.192575] env[67008]: DEBUG nova.compute.manager [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] [instance: dba9efdd-6e4d-488e-aa38-815f01c4b571] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1225.203962] env[67008]: ERROR nova.compute.manager [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last): [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] exception_handler_v20(status_code, error_body) [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise client_exc(message=error_message, [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Neutron server returns request_ids: ['req-89ef58d8-3a33-4a12-a8e2-9cf8270f602c'] [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] During handling of the above exception, another exception occurred: [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Traceback (most recent call last): [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._delete_instance(context, instance, bdms) [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._shutdown_instance(context, instance, bdms) [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._try_deallocate_network(context, instance, requested_networks) [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] with excutils.save_and_reraise_exception(): [ 1225.203962] env[67008]: ERROR 
nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.force_reraise() [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise self.value [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] _deallocate_network_with_retries() [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return evt.wait() [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] result = hub.switch() [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.greenlet.switch() [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] result = func(*self.args, **self.kw) [ 1225.203962] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] result = f(*args, **kwargs) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._deallocate_network( [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self.network_api.deallocate_for_instance( [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 
8b645fe3-0a5d-4f12-a99d-1f0580432d59] data = neutron.list_ports(**search_opts) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.list('ports', self.ports_path, retrieve_all, [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] for r in self._pagination(collection, path, **params): [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] res = self.get(path, params=params) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.retry_request("GET", action, body=body, [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] return self.do_request(method, action, body=body, [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] ret = obj(*args, **kwargs) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] self._handle_fault_response(status_code, replybody, resp) [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1225.205496] env[67008]: ERROR nova.compute.manager [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] [ 1225.222259] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Lock "dba9efdd-6e4d-488e-aa38-815f01c4b571" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.480s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.231416] env[67008]: DEBUG nova.compute.manager [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] [instance: 33e54452-17bb-4141-856a-7e19e2e60dbf] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1225.241107] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.269s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.241107] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 65.218s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1225.241107] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1225.241107] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "8b645fe3-0a5d-4f12-a99d-1f0580432d59" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.267395] env[67008]: DEBUG nova.compute.manager [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] [instance: 33e54452-17bb-4141-856a-7e19e2e60dbf] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1225.294519] env[67008]: DEBUG oslo_concurrency.lockutils [None req-002dd0d5-afe6-4c54-acaf-5d3b9ce8587b tempest-ListServersNegativeTestJSON-243247854 tempest-ListServersNegativeTestJSON-243247854-project-member] Lock "33e54452-17bb-4141-856a-7e19e2e60dbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.522s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1225.302820] env[67008]: INFO nova.compute.manager [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] [instance: 8b645fe3-0a5d-4f12-a99d-1f0580432d59] Successfully reverted task state from None on failure for instance. [ 1225.306696] env[67008]: DEBUG nova.compute.manager [None req-f92c2104-a2f6-4c86-9b35-bd16be3e092b tempest-ServerMetadataNegativeTestJSON-320936444 tempest-ServerMetadataNegativeTestJSON-320936444-project-member] [instance: 07abda77-2e28-4bac-a36b-dc837208c28f] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server [None req-e508eb04-466f-493d-ae6f-714560497361 tempest-TenantUsagesTestJSON-523036506 tempest-TenantUsagesTestJSON-523036506-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     exception_handler_v20(status_code, error_body)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     raise client_exc(message=error_message,
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-89ef58d8-3a33-4a12-a8e2-9cf8270f602c']
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     res = self.dispatcher.dispatch(message)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     return self._do_dispatch(endpoint, method, ctxt, args)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     result = func(ctxt, **new_args)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     raise self.value
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     return f(self, context, *args, **kw)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     raise self.value
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     return function(self, context, *args, **kwargs)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     return function(self, context, *args, **kwargs)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     raise self.value
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     return function(self, context, *args, **kwargs)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     do_terminate_instance(instance, bdms)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     return f(*args, **kwargs)
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance
[ 1225.310583] env[67008]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     raise self.value
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self._delete_instance(context, instance, bdms)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self._shutdown_instance(context, instance, bdms)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self._try_deallocate_network(context, instance, requested_networks)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     raise self.value
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     _deallocate_network_with_retries()
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     return evt.wait()
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     result = hub.switch()
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     return self.greenlet.switch()
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     result = func(*self.args, **self.kw)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     result = f(*args, **kwargs)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self._deallocate_network(
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     self.network_api.deallocate_for_instance(
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     data = neutron.list_ports(**search_opts)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     return self.list('ports', self.ports_path, retrieve_all,
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     for r in self._pagination(collection, path, **params):
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     res = self.get(path, params=params)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     return self.retry_request("GET", action, body=body,
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     return self.do_request(method, action, body=body,
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1225.312303] env[67008]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 1225.314532] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1225.314532] env[67008]: ERROR oslo_messaging.rpc.server     self._handle_fault_response(status_code, replybody, resp)
[ 1225.314532] env[67008]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1225.314532] env[67008]: ERROR oslo_messaging.rpc.server     raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1225.314532] env[67008]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1225.314532] env[67008]: ERROR oslo_messaging.rpc.server
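The chained traceback above is the central failure in this section: during terminate/deallocate, neutronclient raised Unauthorized (HTTP 401) on the GET for ports, and the wrapper frames at nova/network/neutron.py lines 196/212 re-raised it as nova.exception.NeutronAdminCredentialConfigurationInvalid, since a 401 against Nova's service credentials indicates a misconfigured [neutron] auth section rather than a transient fault. A minimal sketch of that translate-and-reraise pattern, with stand-in exception classes rather than the real (and richer) Nova/neutronclient types:

```python
import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class of the same name."""

def translate_neutron_exceptions(func):
    """Run the client call; convert a 401 into a configuration error,
    mirroring (in simplified form) the wrapper visible in the traceback."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized as exc:
            # Service credentials should never see 401, so this is treated
            # as a deployment problem and re-raised, not retried.
            raise NeutronAdminCredentialConfigurationInvalid() from exc
    return wrapper

@translate_neutron_exceptions
def list_ports(**search_opts):
    # Simulate the failing GET /v2.0/ports seen in the traceback.
    raise Unauthorized("The request you have made requires authentication.")

try:
    list_ports(device_id="1f040a29-196b-4a5c-808f-53dc56f3facc")
except NeutronAdminCredentialConfigurationInvalid as e:
    print(type(e).__name__, "<-", type(e.__cause__).__name__)
```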
[ 1225.332969] env[67008]: DEBUG nova.compute.manager [None req-f92c2104-a2f6-4c86-9b35-bd16be3e092b tempest-ServerMetadataNegativeTestJSON-320936444 tempest-ServerMetadataNegativeTestJSON-320936444-project-member] [instance: 07abda77-2e28-4bac-a36b-dc837208c28f] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}}
[ 1225.353354] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f92c2104-a2f6-4c86-9b35-bd16be3e092b tempest-ServerMetadataNegativeTestJSON-320936444 tempest-ServerMetadataNegativeTestJSON-320936444-project-member] Lock "07abda77-2e28-4bac-a36b-dc837208c28f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.692s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1225.361982] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}}
[ 1225.418349] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1225.418714] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1225.420117] env[67008]: INFO nova.compute.claims [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1225.727381] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3547912d-cc9a-4638-b85d-f0d3b5155989 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.738704] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91846749-422a-4c5f-abaf-1cd4beb738e1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.776077] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d186d37d-d97a-4e62-b1bd-c878b9b11d37 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.783741] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1882bd1-27dc-4ed9-b912-9ff112e1a98d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
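Throughout this log, oslo.concurrency emits paired accounting for every critical section: "acquired ... :: waited N s" is how long the caller blocked behind other holders, and '"released" ... :: held N s' is how long it kept the lock (the 198.692 s hold above is the whole build of the earlier instance running under its per-instance lock). A small stand-in that reproduces the same accounting, for illustration only and not the oslo.concurrency implementation:

```python
import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def traced_lock(name, by):
    # 'waited' = time blocked before acquiring; 'held' = time spent inside.
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{by}"')
    start = time.monotonic()
    with lock:
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{by}" :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - acquired
            print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

with traced_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.1)  # the claim work would happen here
```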
[ 1225.803137] env[67008]: DEBUG nova.compute.provider_tree [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1225.812471] env[67008]: DEBUG nova.scheduler.client.report [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1225.827154] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.408s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1225.827657] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}}
[ 1225.862969] env[67008]: DEBUG nova.compute.utils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1225.865020] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}}
[ 1225.865020] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1225.873822] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}}
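The inventory record above is what Placement uses to admit new allocations: schedulable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining any single allocation. That is how this 48-vCPU host can carry far more than 48 guest vCPUs. Worked out for the values logged (a quick illustration, not Nova source code):

```python
# Inventory exactly as reported for provider ad100a41-192a-4a03-bdd9-0a78ce856705.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}")
# VCPU: capacity 192        (48 physical vCPUs, oversubscribed 4x)
# MEMORY_MB: capacity 196078
# DISK_GB: capacity 400
```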
[ 1225.946361] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}}
[ 1225.972847] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1225.973111] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1225.973272] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1225.973452] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1225.973598] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1225.973744] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1225.973946] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
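The "Build topologies for 1 vcpu(s) 1:1:1" step just below enumerates every (sockets, cores, threads) combination that multiplies out to the flavor's vCPU count without exceeding the limits chosen above (65536 each here, since neither flavor nor image constrained anything). A simplified sketch of that enumeration; this mirrors the intent of _get_possible_cpu_topologies as the log describes it, not Nova's exact code:

```python
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets*cores*threads factorization of vcpus within limits."""
    for s, c, t in itertools.product(
            range(1, min(max_sockets, vcpus) + 1),
            range(1, min(max_cores, vcpus) + 1),
            range(1, min(max_threads, vcpus) + 1)):
        if s * c * t == vcpus:
            yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
# which matches the "Got 1 possible topologies" line for this 1-vCPU flavor.
```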
[ 1225.974116] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1225.974288] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1225.974448] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1225.974617] env[67008]: DEBUG nova.virt.hardware [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1225.975489] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf483ac-41e2-40d2-86a5-a40effca0461 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1225.980760] env[67008]: DEBUG nova.policy [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '645c897e2c0f41009463aba4ce06f047', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ffb1c5fe6bac4371ba07d4e70d138870', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1225.986199] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f76946-8c6d-4311-b1c8-7411c0872751 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1226.287018] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Successfully created port: 609db5b3-e197-4a14-ba25-32371c847fae {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1227.163254] env[67008]: DEBUG nova.compute.manager [req-185276c8-4f62-49a4-808b-e9bced0acd57 req-3060e310-6520-407e-a700-095977a7116e service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Received event network-vif-plugged-609db5b3-e197-4a14-ba25-32371c847fae {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}}
[ 1227.163531] env[67008]: DEBUG oslo_concurrency.lockutils [req-185276c8-4f62-49a4-808b-e9bced0acd57 req-3060e310-6520-407e-a700-095977a7116e service nova] Acquiring lock "1f040a29-196b-4a5c-808f-53dc56f3facc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 1227.163693] env[67008]: DEBUG oslo_concurrency.lockutils [req-185276c8-4f62-49a4-808b-e9bced0acd57 req-3060e310-6520-407e-a700-095977a7116e service nova] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 1227.163856] env[67008]: DEBUG oslo_concurrency.lockutils [req-185276c8-4f62-49a4-808b-e9bced0acd57 req-3060e310-6520-407e-a700-095977a7116e service nova] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1227.164709] env[67008]: DEBUG nova.compute.manager [req-185276c8-4f62-49a4-808b-e9bced0acd57 req-3060e310-6520-407e-a700-095977a7116e service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] No waiting events found dispatching network-vif-plugged-609db5b3-e197-4a14-ba25-32371c847fae {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1227.164988] env[67008]: WARNING nova.compute.manager [req-185276c8-4f62-49a4-808b-e9bced0acd57 req-3060e310-6520-407e-a700-095977a7116e service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Received unexpected event network-vif-plugged-609db5b3-e197-4a14-ba25-32371c847fae for instance with vm_state building and task_state spawning.
[ 1227.317021] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Successfully updated port: 609db5b3-e197-4a14-ba25-32371c847fae {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1227.330823] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "refresh_cache-1f040a29-196b-4a5c-808f-53dc56f3facc" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}}
[ 1227.330981] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired lock "refresh_cache-1f040a29-196b-4a5c-808f-53dc56f3facc" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 1227.331146] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1227.367449] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance cache missing network info.
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1227.569653] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Updating instance_info_cache with network_info: [{"id": "609db5b3-e197-4a14-ba25-32371c847fae", "address": "fa:16:3e:3f:04:8b", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap609db5b3-e1", "ovs_interfaceid": "609db5b3-e197-4a14-ba25-32371c847fae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.589122] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Releasing lock "refresh_cache-1f040a29-196b-4a5c-808f-53dc56f3facc" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1227.589122] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance network_info: |[{"id": "609db5b3-e197-4a14-ba25-32371c847fae", "address": "fa:16:3e:3f:04:8b", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap609db5b3-e1", "ovs_interfaceid": "609db5b3-e197-4a14-ba25-32371c847fae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1227.589122] env[67008]: 
DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:04:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '609db5b3-e197-4a14-ba25-32371c847fae', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.596496] env[67008]: DEBUG oslo.service.loopingcall [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.597010] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1227.598328] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bb289d3-9176-465f-af56-6eb5ede2abdb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.621299] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.621299] env[67008]: value = "task-2824935" [ 1227.621299] env[67008]: _type = "Task" [ 1227.621299] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.633409] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824935, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.133383] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824935, 'name': CreateVM_Task, 'duration_secs': 0.329361} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.133551] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1228.203905] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1228.203905] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1228.203905] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1228.203905] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24756a63-af38-4044-ab29-15985400d63f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.210560] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 1228.210560] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ac0667-d7b7-2a7c-1407-d18e3cb311d9" [ 1228.210560] env[67008]: _type = "Task" [ 1228.210560] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.221129] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ac0667-d7b7-2a7c-1407-d18e3cb311d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.683632] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "8632f87b-bab8-4df1-a403-a987b0769f8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1228.683632] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1228.721719] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1228.721719] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1228.721719] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1229.195752] env[67008]: DEBUG nova.compute.manager [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Received event network-changed-609db5b3-e197-4a14-ba25-32371c847fae {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1229.195958] env[67008]: DEBUG nova.compute.manager [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Refreshing instance network info cache due to event network-changed-609db5b3-e197-4a14-ba25-32371c847fae. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1229.196211] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] Acquiring lock "refresh_cache-1f040a29-196b-4a5c-808f-53dc56f3facc" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1229.196371] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] Acquired lock "refresh_cache-1f040a29-196b-4a5c-808f-53dc56f3facc" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1229.196510] env[67008]: DEBUG nova.network.neutron [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Refreshing network info cache for port 609db5b3-e197-4a14-ba25-32371c847fae {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1229.747354] env[67008]: DEBUG nova.network.neutron [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Updated VIF entry in instance network info cache for port 609db5b3-e197-4a14-ba25-32371c847fae. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1229.747895] env[67008]: DEBUG nova.network.neutron [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Updating instance_info_cache with network_info: [{"id": "609db5b3-e197-4a14-ba25-32371c847fae", "address": "fa:16:3e:3f:04:8b", "network": {"id": "42b596bd-f7a2-4a48-b019-9740815bf1da", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.168", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "26193804e2bc4e6fa9cf7c325c35a944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap609db5b3-e1", "ovs_interfaceid": "609db5b3-e197-4a14-ba25-32371c847fae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.760433] env[67008]: DEBUG oslo_concurrency.lockutils [req-3c5f6433-94a7-487a-88ad-41c5abd84c7a req-37d9e709-73fc-4d38-80ee-d7dd82bc1ffb service nova] Releasing lock "refresh_cache-1f040a29-196b-4a5c-808f-53dc56f3facc" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1234.479695] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock 
"1f040a29-196b-4a5c-808f-53dc56f3facc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1240.313936] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7d7350a0-506e-4a7c-85e2-344f7a73d957 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Acquiring lock "98d0e65d-06a0-4487-88ee-014f9c3a483d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1240.314232] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7d7350a0-506e-4a7c-85e2-344f7a73d957 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "98d0e65d-06a0-4487-88ee-014f9c3a483d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1248.316593] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa7d7320-a235-4578-aad0-3c5a4630b486 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Acquiring lock "dba7a92a-87ef-462d-adee-d6a1eb044698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1248.316963] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa7d7320-a235-4578-aad0-3c5a4630b486 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Lock "dba7a92a-87ef-462d-adee-d6a1eb044698" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1248.852484] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Acquiring lock "e7e2b81a-4876-45bf-8fc0-d35c25c8a77c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1248.852717] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "e7e2b81a-4876-45bf-8fc0-d35c25c8a77c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1248.877932] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Acquiring lock "bc882509-a6b8-494c-b334-3c60094ca4ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1248.878166] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "bc882509-a6b8-494c-b334-3c60094ca4ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1249.523775] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d41f6306-c2b4-488e-b565-b741dbd60fa4 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "b0d5a68a-8f4b-4959-a855-dbdc14adca6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1249.524133] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d41f6306-c2b4-488e-b565-b741dbd60fa4 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "b0d5a68a-8f4b-4959-a855-dbdc14adca6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1256.856660] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.869348] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1256.869691] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1256.869948] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1256.870139] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1256.871345] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19ac200-e205-45a9-aa1a-209dcd07ede3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.880540] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed00821f-eb9b-41d8-af9d-f0a75677fce5 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.894729] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829a5aa1-6fe2-4354-a388-1d1160fc0bbe {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.901530] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27c552e-b5a6-457e-a82a-b9bc56a5f0ce {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.931025] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181064MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1256.931189] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1256.931382] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1257.036963] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.037146] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.037275] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.037395] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.037508] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.037618] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.037914] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.038057] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.038175] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.038288] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1257.050737] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0a52230a-d439-45dd-a908-bd698f94e841 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.065898] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.079761] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.092465] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.103708] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.117600] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 98d0e65d-06a0-4487-88ee-014f9c3a483d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.127844] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba7a92a-87ef-462d-adee-d6a1eb044698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.137307] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e7e2b81a-4876-45bf-8fc0-d35c25c8a77c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.147337] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bc882509-a6b8-494c-b334-3c60094ca4ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.156548] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b0d5a68a-8f4b-4959-a855-dbdc14adca6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1257.156772] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1257.156919] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1257.401935] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7db75d-c631-4ff9-b9bc-2afbb956e357 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.409106] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09d9485-4b81-4ab1-a1b7-419d972df6d6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.438567] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18e2a8a-b6d1-4856-9b20-a47f41293331 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.445448] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51146a3f-003e-4b18-bac0-9c5b35eac20b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.459087] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1257.467805] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1257.485082] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1257.485301] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.554s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1258.674850] env[67008]: DEBUG oslo_concurrency.lockutils [None req-dc1c3463-a622-446c-9716-03fff0c8b39a tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] Acquiring lock "f12d848f-2bed-4838-964b-fd1820160ddd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1258.675130] env[67008]: DEBUG oslo_concurrency.lockutils [None req-dc1c3463-a622-446c-9716-03fff0c8b39a tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] Lock "f12d848f-2bed-4838-964b-fd1820160ddd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1259.486068] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.856412] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.856767] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.857083] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.851645] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.856309] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.856437] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1262.857140] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.857496] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1262.857496] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1262.881432] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.881632] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.881771] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.881899] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882032] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882162] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882282] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882401] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882519] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882648] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1262.882772] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1264.857994] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.852660] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.005306] env[67008]: WARNING oslo_vmware.rw_handles [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1270.005306] env[67008]: ERROR oslo_vmware.rw_handles [ 1270.005821] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 
{{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1270.009054] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1270.009054] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Copying Virtual Disk [datastore1] vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/25662dca-6539-4100-b14d-4fc7007b6892/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1270.009054] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3110ee5-3cb0-4968-88d2-09de9e24e370 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.016820] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Waiting for the task: (returnval){ [ 1270.016820] env[67008]: value = "task-2824936" [ 1270.016820] env[67008]: _type = "Task" [ 1270.016820] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.024548] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Task: {'id': task-2824936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.527452] env[67008]: DEBUG oslo_vmware.exceptions [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1270.527752] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1270.528330] env[67008]: ERROR nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1270.528330] env[67008]: Faults: ['InvalidArgument'] [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Traceback (most recent call last): [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] yield resources [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self.driver.spawn(context, instance, image_meta, [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self._fetch_image_if_missing(context, vi) [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] image_cache(vi, tmp_image_ds_loc) [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] vm_util.copy_virtual_disk( [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] session._wait_for_task(vmdk_copy_task) [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] return self.wait_for_task(task_ref) [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] return evt.wait() [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] result = hub.switch() [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] return self.greenlet.switch() [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self.f(*self.args, **self.kw) [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] raise exceptions.translate_fault(task_info.error) [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Faults: ['InvalidArgument'] [ 1270.528330] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] [ 1270.529116] env[67008]: INFO nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Terminating instance [ 1270.530325] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1270.530527] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.531159] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 
tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1270.531352] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1270.531579] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b8344f3-c722-42a6-85e3-9b1cd5f12f53 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.533832] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1f271c-2b2c-490c-94d9-9395a31a5ea0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.540627] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1270.540873] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59b0b287-b1a0-4fdc-8c6c-d0876b9b28c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.543038] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.543219] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1270.544177] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4cb8458-28fa-4489-8164-530fe49a47fd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.548819] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 1270.548819] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52911ef9-1044-1557-0ead-3d7a71c6ac2b" [ 1270.548819] env[67008]: _type = "Task" [ 1270.548819] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.555956] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52911ef9-1044-1557-0ead-3d7a71c6ac2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.617795] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1270.617998] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1270.618203] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Deleting the datastore file [datastore1] 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1270.618469] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6262a09c-33de-4bc2-96ed-cde0951835b0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.624304] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Waiting for the task: (returnval){ [ 1270.624304] env[67008]: value = "task-2824938" [ 1270.624304] env[67008]: _type = "Task" [ 1270.624304] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.632747] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Task: {'id': task-2824938, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.059595] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1271.060018] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating directory with path [datastore1] vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1271.060300] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81ac19da-0298-46f5-816b-ac5aff9e608a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.071472] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Created directory with path [datastore1] vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1271.071661] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Fetch image to [datastore1] vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1271.071831] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1271.072589] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f55cb19-2e13-4a06-a756-cf28823a0f8b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.079128] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198e5734-9a2b-433b-9c86-2112cbac032a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.087875] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24764616-5ab9-4787-a48d-7416be9994cd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.117533] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c24554ef-4cd0-4b42-9254-037a11902bbf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.122568] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ad01d21c-8c34-4376-b939-60270d5808b4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.132119] env[67008]: DEBUG oslo_vmware.api [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Task: {'id': task-2824938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062655} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.132343] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1271.132521] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1271.132688] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1271.132858] env[67008]: INFO nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1271.134901] env[67008]: DEBUG nova.compute.claims [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1271.135116] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1271.135342] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1271.147039] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1271.196538] env[67008]: DEBUG oslo_vmware.rw_handles [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1271.254863] env[67008]: DEBUG oslo_vmware.rw_handles [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1271.255070] env[67008]: DEBUG oslo_vmware.rw_handles [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1271.444500] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abef32b9-8a4c-4e46-be77-50793975dfd4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.451686] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b3ee5e-86db-4c48-86ea-8ccc7326585e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.481470] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc85acf-00c6-485e-91c8-6a2af2b488a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.488203] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174fd51c-50a0-4509-a9eb-1737bae8b2d5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.500837] env[67008]: DEBUG nova.compute.provider_tree [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.509421] env[67008]: DEBUG nova.scheduler.client.report [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1271.528556] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.393s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1271.529190] env[67008]: ERROR nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1271.529190] env[67008]: Faults: ['InvalidArgument'] [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Traceback (most recent call last): [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1271.529190] 
env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self.driver.spawn(context, instance, image_meta, [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self._fetch_image_if_missing(context, vi) [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] image_cache(vi, tmp_image_ds_loc) [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] vm_util.copy_virtual_disk( [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] session._wait_for_task(vmdk_copy_task) [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] return self.wait_for_task(task_ref) [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] return evt.wait() [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] result = hub.switch() [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] return self.greenlet.switch() [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] self.f(*self.args, **self.kw) [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] raise exceptions.translate_fault(task_info.error) [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Faults: ['InvalidArgument'] [ 1271.529190] env[67008]: ERROR nova.compute.manager [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] [ 1271.530012] env[67008]: DEBUG nova.compute.utils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1271.531463] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Build of instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b was re-scheduled: A specified parameter was not correct: fileType [ 1271.531463] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1271.531841] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1271.532057] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1271.532209] env[67008]: DEBUG nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1271.532372] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1271.910120] env[67008]: DEBUG nova.network.neutron [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.921630] env[67008]: INFO nova.compute.manager [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Took 0.39 seconds to deallocate network for instance. [ 1272.051292] env[67008]: INFO nova.scheduler.client.report [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Deleted allocations for instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b [ 1272.072632] env[67008]: DEBUG oslo_concurrency.lockutils [None req-90662390-a6c9-4b25-97d5-14b4da15fec0 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 661.293s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.073811] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 463.195s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1272.074063] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Acquiring lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.074294] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1272.074468] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.076705] env[67008]: INFO nova.compute.manager [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Terminating instance [ 1272.078372] env[67008]: DEBUG nova.compute.manager [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1272.078564] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1272.079030] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-253329e5-fed7-46db-b54d-0313049a1160 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.084058] env[67008]: DEBUG nova.compute.manager [None req-ebf0717f-0544-4cc0-9bec-e51647669d25 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: b976b36c-2847-483a-babb-77e58cdf3932] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.090188] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0712d13a-dd86-49f0-a7f8-9361329853c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.107914] env[67008]: DEBUG nova.compute.manager [None req-ebf0717f-0544-4cc0-9bec-e51647669d25 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: b976b36c-2847-483a-babb-77e58cdf3932] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.120985] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b could not be found. 
[ 1272.121204] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1272.121398] env[67008]: INFO nova.compute.manager [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1272.121899] env[67008]: DEBUG oslo.service.loopingcall [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1272.121899] env[67008]: DEBUG nova.compute.manager [-] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1272.121999] env[67008]: DEBUG nova.network.neutron [-] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1272.134678] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ebf0717f-0544-4cc0-9bec-e51647669d25 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "b976b36c-2847-483a-babb-77e58cdf3932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.854s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.144934] env[67008]: DEBUG nova.compute.manager [None req-1db2b624-a2cf-4080-b168-98c6168b085d tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 0761816d-194d-44ab-97f0-4214157a1edf] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.147526] env[67008]: DEBUG nova.network.neutron [-] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.156562] env[67008]: INFO nova.compute.manager [-] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] Took 0.03 seconds to deallocate network for instance. [ 1272.177039] env[67008]: DEBUG nova.compute.manager [None req-1db2b624-a2cf-4080-b168-98c6168b085d tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 0761816d-194d-44ab-97f0-4214157a1edf] Instance disappeared before build.
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.197846] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1db2b624-a2cf-4080-b168-98c6168b085d tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "0761816d-194d-44ab-97f0-4214157a1edf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 222.869s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.206924] env[67008]: DEBUG nova.compute.manager [None req-13525e14-b0b4-4cc9-a2f0-983dc2ec1711 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] [instance: 31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.233958] env[67008]: DEBUG nova.compute.manager [None req-13525e14-b0b4-4cc9-a2f0-983dc2ec1711 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] [instance: 31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.269394] env[67008]: DEBUG oslo_concurrency.lockutils [None req-13525e14-b0b4-4cc9-a2f0-983dc2ec1711 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Lock "31dc5bfb-d10c-4d1f-bbd9-524d91cb84f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 221.696s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.273774] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d7704e10-0da8-40cd-b682-eb99088eb111 tempest-ServersTestFqdnHostnames-766664531 tempest-ServersTestFqdnHostnames-766664531-project-member] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.200s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.274537] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 112.252s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1272.274722] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1272.274891] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "623dff5f-3c90-4aa0-82ac-5d0fd25dcc0b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.278198] env[67008]: DEBUG nova.compute.manager [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: 6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.300220] env[67008]: DEBUG nova.compute.manager [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: 6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.319463] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "6a8ca6f1-196d-41d3-8e29-e7ac99e3ddd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 221.406s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.327993] env[67008]: DEBUG nova.compute.manager [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: e3812a2c-b59a-48cb-90b5-0b185351d3b9] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.350994] env[67008]: DEBUG nova.compute.manager [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: e3812a2c-b59a-48cb-90b5-0b185351d3b9] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.371803] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e2fd5b03-666e-49ce-9deb-ba05b5d3931d tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "e3812a2c-b59a-48cb-90b5-0b185351d3b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 221.433s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.380206] env[67008]: DEBUG nova.compute.manager [None req-4193674c-d760-41c5-bba9-ba11fdf5178f tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: fad05213-ac21-499c-b7fb-1929e9b3fca5] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.401841] env[67008]: DEBUG nova.compute.manager [None req-4193674c-d760-41c5-bba9-ba11fdf5178f tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: fad05213-ac21-499c-b7fb-1929e9b3fca5] Instance disappeared before build.
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.421755] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4193674c-d760-41c5-bba9-ba11fdf5178f tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "fad05213-ac21-499c-b7fb-1929e9b3fca5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.559s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.430131] env[67008]: DEBUG nova.compute.manager [None req-a4463281-319a-40ba-8ca3-05c35681654d tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] [instance: 9617d5d7-7977-475c-8375-1a59ed302444] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.452512] env[67008]: DEBUG nova.compute.manager [None req-a4463281-319a-40ba-8ca3-05c35681654d tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] [instance: 9617d5d7-7977-475c-8375-1a59ed302444] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.473112] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a4463281-319a-40ba-8ca3-05c35681654d tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] Lock "9617d5d7-7977-475c-8375-1a59ed302444" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.312s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.481799] env[67008]: DEBUG nova.compute.manager [None req-5fc68f6c-ccf3-4d01-8292-6e8aa9761f9e tempest-ServerPasswordTestJSON-1865102295 tempest-ServerPasswordTestJSON-1865102295-project-member] [instance: 0a52230a-d439-45dd-a908-bd698f94e841] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.505463] env[67008]: DEBUG nova.compute.manager [None req-5fc68f6c-ccf3-4d01-8292-6e8aa9761f9e tempest-ServerPasswordTestJSON-1865102295 tempest-ServerPasswordTestJSON-1865102295-project-member] [instance: 0a52230a-d439-45dd-a908-bd698f94e841] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1272.529023] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5fc68f6c-ccf3-4d01-8292-6e8aa9761f9e tempest-ServerPasswordTestJSON-1865102295 tempest-ServerPasswordTestJSON-1865102295-project-member] Lock "0a52230a-d439-45dd-a908-bd698f94e841" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.868s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.540297] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1272.601304] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1272.601553] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1272.603074] env[67008]: INFO nova.compute.claims [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.848596] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f0eff7-0cba-4372-a398-e37303c73620 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.855935] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc69836-1e90-440c-86df-6c2c00eb52d9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.885396] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffeb3c8b-5b8c-4d6d-beb1-4eb9ffb106d5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.891910] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe32099-803f-4059-9161-1322d019b8df {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.904701] env[67008]: DEBUG nova.compute.provider_tree [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.914460] env[67008]: DEBUG nova.scheduler.client.report [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1272.927010] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 
tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.325s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1272.927484] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1272.960046] env[67008]: DEBUG nova.compute.utils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.961571] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1272.961711] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1272.975019] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1273.039061] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1273.067815] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1273.068108] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1273.068309] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1273.068502] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1273.068648] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1273.068792] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1273.068993] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1273.069162] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1273.069329] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 
tempest-ImagesTestJSON-1740946510-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1273.069486] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1273.069657] env[67008]: DEBUG nova.virt.hardware [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1273.070617] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19551dfa-e90a-4f27-9c8f-a232fff17ba5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.078919] env[67008]: DEBUG nova.policy [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5343dcbda10b4898b07cfc371ea9e355', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2ffa090d43c4facaec9fcb96575a5f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1273.081270] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7262ccf-3612-48d0-a6c1-91a37ec9c874 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.790184] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Successfully created port: 8ff1edfd-a948-424d-89ef-38c20bbf2ae3 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1274.321793] env[67008]: DEBUG nova.compute.manager [req-57d447a1-8575-4dbb-82b9-8ffb0161232a req-d5f46b05-e3f0-4b1c-b3b7-1272acb4eb29 service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Received event network-vif-plugged-8ff1edfd-a948-424d-89ef-38c20bbf2ae3 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1274.322071] env[67008]: DEBUG oslo_concurrency.lockutils [req-57d447a1-8575-4dbb-82b9-8ffb0161232a req-d5f46b05-e3f0-4b1c-b3b7-1272acb4eb29 service nova] Acquiring lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1274.322234] env[67008]: DEBUG oslo_concurrency.lockutils [req-57d447a1-8575-4dbb-82b9-8ffb0161232a req-d5f46b05-e3f0-4b1c-b3b7-1272acb4eb29 service nova] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1274.322402] env[67008]: DEBUG oslo_concurrency.lockutils [req-57d447a1-8575-4dbb-82b9-8ffb0161232a req-d5f46b05-e3f0-4b1c-b3b7-1272acb4eb29 service nova] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1274.322568] env[67008]: DEBUG nova.compute.manager [req-57d447a1-8575-4dbb-82b9-8ffb0161232a req-d5f46b05-e3f0-4b1c-b3b7-1272acb4eb29 service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] No waiting events found dispatching network-vif-plugged-8ff1edfd-a948-424d-89ef-38c20bbf2ae3 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1274.322728] env[67008]: WARNING nova.compute.manager [req-57d447a1-8575-4dbb-82b9-8ffb0161232a req-d5f46b05-e3f0-4b1c-b3b7-1272acb4eb29 service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Received unexpected event network-vif-plugged-8ff1edfd-a948-424d-89ef-38c20bbf2ae3 for instance with vm_state building and task_state spawning. [ 1274.400464] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Successfully updated port: 8ff1edfd-a948-424d-89ef-38c20bbf2ae3 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1274.414163] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "refresh_cache-042421f3-9b91-4fb2-bc3c-0d97e93ad78e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1274.414335] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "refresh_cache-042421f3-9b91-4fb2-bc3c-0d97e93ad78e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1274.414486] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1274.488083] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1274.780821] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Updating instance_info_cache with network_info: [{"id": "8ff1edfd-a948-424d-89ef-38c20bbf2ae3", "address": "fa:16:3e:7b:07:5d", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff1edfd-a9", "ovs_interfaceid": "8ff1edfd-a948-424d-89ef-38c20bbf2ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.795084] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "refresh_cache-042421f3-9b91-4fb2-bc3c-0d97e93ad78e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1274.795390] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance network_info: |[{"id": "8ff1edfd-a948-424d-89ef-38c20bbf2ae3", "address": "fa:16:3e:7b:07:5d", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff1edfd-a9", "ovs_interfaceid": "8ff1edfd-a948-424d-89ef-38c20bbf2ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1274.795779] env[67008]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:07:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ff1edfd-a948-424d-89ef-38c20bbf2ae3', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1274.808128] env[67008]: DEBUG oslo.service.loopingcall [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1274.808638] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1274.808870] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e30ceff-45ad-4bb2-ac5a-8bed4548a694 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.829327] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1274.829327] env[67008]: value = "task-2824939" [ 1274.829327] env[67008]: _type = "Task" [ 1274.829327] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.836733] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824939, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.340874] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824939, 'name': CreateVM_Task, 'duration_secs': 0.283162} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.341199] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1275.341721] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1275.341882] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1275.342226] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1275.342464] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54f831fc-4eef-47f2-b25c-025dfcde7dea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.346599] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1275.346599] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c30b7c-096a-0f2e-b80e-17c4b70060b9" [ 1275.346599] env[67008]: _type = "Task" [ 1275.346599] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.355770] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c30b7c-096a-0f2e-b80e-17c4b70060b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.856917] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1275.857296] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1275.857385] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1276.355506] env[67008]: DEBUG nova.compute.manager [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Received event network-changed-8ff1edfd-a948-424d-89ef-38c20bbf2ae3 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1276.355762] env[67008]: DEBUG nova.compute.manager [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Refreshing instance network info cache due to event network-changed-8ff1edfd-a948-424d-89ef-38c20bbf2ae3. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1276.355762] env[67008]: DEBUG oslo_concurrency.lockutils [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] Acquiring lock "refresh_cache-042421f3-9b91-4fb2-bc3c-0d97e93ad78e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1276.355906] env[67008]: DEBUG oslo_concurrency.lockutils [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] Acquired lock "refresh_cache-042421f3-9b91-4fb2-bc3c-0d97e93ad78e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1276.356081] env[67008]: DEBUG nova.network.neutron [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Refreshing network info cache for port 8ff1edfd-a948-424d-89ef-38c20bbf2ae3 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1276.618429] env[67008]: DEBUG nova.network.neutron [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Updated VIF entry in instance network info cache for port 8ff1edfd-a948-424d-89ef-38c20bbf2ae3. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1276.618772] env[67008]: DEBUG nova.network.neutron [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Updating instance_info_cache with network_info: [{"id": "8ff1edfd-a948-424d-89ef-38c20bbf2ae3", "address": "fa:16:3e:7b:07:5d", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ff1edfd-a9", "ovs_interfaceid": "8ff1edfd-a948-424d-89ef-38c20bbf2ae3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.627844] env[67008]: DEBUG oslo_concurrency.lockutils [req-e277fedd-de56-476a-8c48-47c1d37b9158 req-d58ee1f0-cace-4ad6-920c-b2404680afbd service nova] Releasing lock "refresh_cache-042421f3-9b91-4fb2-bc3c-0d97e93ad78e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1280.949055] env[67008]: DEBUG oslo_concurrency.lockutils [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1287.888656] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1287.888925] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1288.139509] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4b47d448-f62c-4004-8ec8-3842c85d38ed tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "a5ce5bee-ddc9-4671-8750-6e554051315a" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1288.139739] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4b47d448-f62c-4004-8ec8-3842c85d38ed tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "a5ce5bee-ddc9-4671-8750-6e554051315a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1316.373681] env[67008]: WARNING oslo_vmware.rw_handles [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1316.373681] env[67008]: ERROR oslo_vmware.rw_handles [ 1316.374362] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1316.376480] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1316.376740] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Copying Virtual Disk [datastore1] vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] 
vmware_temp/53959062-a412-492f-9865-c9b228a2ddcb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1316.377075] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4add75a9-8105-478e-a62e-9e22cd41bd0d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.385351] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 1316.385351] env[67008]: value = "task-2824940" [ 1316.385351] env[67008]: _type = "Task" [ 1316.385351] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.393075] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': task-2824940, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.895227] env[67008]: DEBUG oslo_vmware.exceptions [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1316.895515] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1316.896061] env[67008]: ERROR nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1316.896061] env[67008]: Faults: ['InvalidArgument'] [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Traceback (most recent call last): [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] yield resources [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self.driver.spawn(context, instance, image_meta, [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1316.896061] 
env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self._fetch_image_if_missing(context, vi) [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] image_cache(vi, tmp_image_ds_loc) [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] vm_util.copy_virtual_disk( [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] session._wait_for_task(vmdk_copy_task) [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] return self.wait_for_task(task_ref) [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] return evt.wait() [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] result = hub.switch() [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] return self.greenlet.switch() [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self.f(*self.args, **self.kw) [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] raise exceptions.translate_fault(task_info.error) [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 
804b3e12-f8a6-46e7-ba00-93e0da2d23d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Faults: ['InvalidArgument'] [ 1316.896061] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] [ 1316.896859] env[67008]: INFO nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Terminating instance [ 1316.897892] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1316.898111] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1316.898384] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ed2260b-9783-45f3-9870-74cf97cf52fd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.900575] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1316.900764] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1316.901522] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60a9fce-b3c5-4156-b193-e660b1ff46ac {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.908536] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1316.908774] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a789d912-af6e-4293-a5bd-9c6c098f33fa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.910910] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1316.911095] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1316.912052] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e558755-3fc1-4b50-ab51-88e36bdb441c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.917322] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1316.917322] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]525b037b-893c-090a-bdc2-73ac987cb1d7" [ 1316.917322] env[67008]: _type = "Task" [ 1316.917322] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.924429] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]525b037b-893c-090a-bdc2-73ac987cb1d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.983275] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1316.983738] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1316.983979] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Deleting the datastore file [datastore1] 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.984280] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d019f832-6676-4788-a448-7e1b6fe5e867 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.990789] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 1316.990789] env[67008]: value = "task-2824942" [ 1316.990789] env[67008]: _type = "Task" [ 1316.990789] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.999870] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': task-2824942, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.427957] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1317.428332] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.428641] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf80d30f-a5c5-406b-824d-1f8188549679 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.440165] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1317.440466] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Fetch image to [datastore1] vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1317.440691] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1317.441473] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d036095-65ce-40ef-9cba-34db78fa7d54 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.448052] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d474b82c-38b4-4896-902b-41d735bd6cc3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.457178] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e922697c-fb9f-4970-996d-961c1eff7c4f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.487935] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1866a4e-4271-4a74-a331-06c92b99eba5 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1317.495953] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a1883402-ef4e-412f-855c-d1a0419af6c7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1317.501109] env[67008]: DEBUG oslo_vmware.api [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': task-2824942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072972} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1317.501109] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1317.501109] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1317.501109] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1317.501302] env[67008]: INFO nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Took 0.60 seconds to destroy the instance on the hypervisor.
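The DeleteDatastoreFile_Task records above follow oslo.vmware's invoke-then-poll pattern: a vSphere *_Task method is invoked through the API session, and wait_for_task() polls it (the "Task: {...} progress is N%" DEBUG lines) until it completes or raises a translated fault. A standalone sketch under assumed values: the username, password, and datacenter moref id are placeholders, and only the vCenter host and the datastore path come from this log.

    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap', 'svc-user', 'secret',
        10, 0.5)  # api_retry_count and task_poll_interval; credentials assumed
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] 804b3e12-f8a6-46e7-ba00-93e0da2d23d5',
        datacenter=dc_ref)
    # Polls the task and raises a translated fault on error, e.g. the
    # VimFaultException seen elsewhere for the InvalidArgument copy failure.
    session.wait_for_task(task)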
[ 1317.505799] env[67008]: DEBUG nova.compute.claims [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1317.505993] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1317.506236] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1317.519561] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1317.573657] env[67008]: DEBUG oslo_vmware.rw_handles [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1317.636543] env[67008]: DEBUG oslo_vmware.rw_handles [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1317.636736] env[67008]: DEBUG oslo_vmware.rw_handles [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1317.843893] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f754bfdb-593a-40e5-a818-e37650d8904a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.852137] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8907d7-cd9e-465b-bc1b-379879bdf8fb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.883516] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7886dda-a397-4f4b-873f-edb22e4870f8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.890487] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b286b25d-6991-4d89-80fa-48fe5c67784c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.903488] env[67008]: DEBUG nova.compute.provider_tree [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.911624] env[67008]: DEBUG nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1317.924850] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.419s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1317.925384] env[67008]: ERROR nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1317.925384] env[67008]: Faults: ['InvalidArgument'] [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Traceback (most recent call last): [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self.driver.spawn(context, instance, image_meta, [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self._fetch_image_if_missing(context, vi) [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] image_cache(vi, tmp_image_ds_loc) [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] vm_util.copy_virtual_disk( [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] session._wait_for_task(vmdk_copy_task) [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] return self.wait_for_task(task_ref) [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] return evt.wait() [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] result = hub.switch() [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] return self.greenlet.switch() [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] self.f(*self.args, **self.kw) [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 
804b3e12-f8a6-46e7-ba00-93e0da2d23d5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] raise exceptions.translate_fault(task_info.error) [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Faults: ['InvalidArgument'] [ 1317.925384] env[67008]: ERROR nova.compute.manager [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] [ 1317.926204] env[67008]: DEBUG nova.compute.utils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1317.927397] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Build of instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 was re-scheduled: A specified parameter was not correct: fileType [ 1317.927397] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1317.927768] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1317.927937] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1317.928114] env[67008]: DEBUG nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1317.928309] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1318.302514] env[67008]: DEBUG nova.network.neutron [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.312601] env[67008]: INFO nova.compute.manager [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Took 0.38 seconds to deallocate network for instance. [ 1318.401574] env[67008]: INFO nova.scheduler.client.report [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Deleted allocations for instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 [ 1318.423800] env[67008]: DEBUG oslo_concurrency.lockutils [None req-907fedc8-fb17-4489-b843-0fd9a9c4fbc5 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 676.743s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1318.425033] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 478.215s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1318.425152] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1318.425297] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1318.425466] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1318.427407] env[67008]: INFO nova.compute.manager [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Terminating instance [ 1318.429150] env[67008]: DEBUG nova.compute.manager [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1318.429421] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1318.429802] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1cf6ad82-a3ca-4b03-87e7-f96cffe82586 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.433681] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1318.439976] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca492b36-504c-4ec3-be0a-2672463281f3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.469742] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 804b3e12-f8a6-46e7-ba00-93e0da2d23d5 could not be found. 
[ 1318.469953] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1318.470147] env[67008]: INFO nova.compute.manager [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1318.470425] env[67008]: DEBUG oslo.service.loopingcall [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1318.470669] env[67008]: DEBUG nova.compute.manager [-] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1318.470767] env[67008]: DEBUG nova.network.neutron [-] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1318.487552] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1318.488048] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1318.489467] env[67008]: INFO nova.compute.claims [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1318.498660] env[67008]: DEBUG nova.network.neutron [-] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.509379] env[67008]: INFO nova.compute.manager [-] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] Took 0.04 seconds to deallocate network for instance. 
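[editor's note] The claim at [ 1318.489467] succeeds against the provider inventory this log keeps reporting as unchanged. Assuming the standard placement capacity formula — capacity per resource class = (total - reserved) * allocation_ratio — the inventory dicts in this log work out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A small worked sketch (an illustration of the arithmetic, not the nova source):

```python
def usable_capacity(inventory):
    # capacity = (total - reserved) * allocation_ratio, per resource class
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inventory.items()}


# Values copied from the "Inventory has not changed for provider ..." entries.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

print(usable_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```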
[ 1318.623655] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2f5c1c8a-5e3e-4fa9-88e3-7174f109f5e4 tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1318.625917] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 158.602s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1318.625917] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 804b3e12-f8a6-46e7-ba00-93e0da2d23d5] During sync_power_state the instance has a pending task (deleting). Skip. [ 1318.625917] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "804b3e12-f8a6-46e7-ba00-93e0da2d23d5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1318.774011] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a30f5e5-5280-46f2-9759-422088613a89 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.781680] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6a0b8f-0870-4c66-943e-de57b39cf72f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.810768] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530e8807-9637-4341-9402-108e42b74d21 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.817703] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f633901-6038-459d-9bf4-2cf35c3a8a21 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.831793] env[67008]: DEBUG nova.compute.provider_tree [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.840216] env[67008]: DEBUG nova.scheduler.client.report [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1318.853997] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.366s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1318.854531] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1318.856803] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1318.867708] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1318.867910] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1318.868210] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1318.868296] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1318.869537] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b5f78e-2f41-46c7-a2ee-1f3699a8dc3a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.876956] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f11fefd-47ab-4cf1-a35b-c7b6cfb05220 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.892041] env[67008]: DEBUG nova.compute.utils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1318.893527] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1dd187-d776-40cb-b9b0-5bea7bb899fa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.896229] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Not allocating networking since 'none' was specified. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1318.900774] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896e96e4-cf7a-4a84-9be8-b198ff1b2e26 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.904536] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1318.933032] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181085MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1318.933202] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1318.933415] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1318.969320] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1318.993304] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1318.993547] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1318.993699] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1318.993876] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1318.994030] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1318.994180] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1318.994381] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1318.994540] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1318.994706] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 
tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1318.994862] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1318.995051] env[67008]: DEBUG nova.virt.hardware [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1318.996091] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5255559-18fc-4805-82a8-1711c8df1d47 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.003719] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82565d29-e894-440c-89f5-ef64b1dc51ad {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.007938] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 95604dd8-b797-440e-a844-af44609faa61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008264] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008392] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008510] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008624] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008737] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008847] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.008956] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.009078] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1319.021502] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance VIF info [] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1319.026954] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Creating folder: Project (dce6a1653f784a0ba98792d95114d811). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1319.027775] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-553cb8dc-5a20-4e61-acf3-1fda4be53a2e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.029572] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.039364] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Created folder: Project (dce6a1653f784a0ba98792d95114d811) in parent group-v567993. [ 1319.039540] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Creating folder: Instances. Parent ref: group-v568062. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1319.040072] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.040989] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b13dacc2-a6b1-481c-8082-7ce5b08aa39a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.049247] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 98d0e65d-06a0-4487-88ee-014f9c3a483d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.051216] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Created folder: Instances in parent group-v568062. [ 1319.051433] env[67008]: DEBUG oslo.service.loopingcall [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.051749] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1319.051938] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b55a090d-ad64-4148-81b7-f7781534ff91 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.063832] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba7a92a-87ef-462d-adee-d6a1eb044698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.069088] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1319.069088] env[67008]: value = "task-2824945" [ 1319.069088] env[67008]: _type = "Task" [ 1319.069088] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.073016] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e7e2b81a-4876-45bf-8fc0-d35c25c8a77c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.077102] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824945, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.081929] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bc882509-a6b8-494c-b334-3c60094ca4ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.090629] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b0d5a68a-8f4b-4959-a855-dbdc14adca6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.101170] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f12d848f-2bed-4838-964b-fd1820160ddd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.111735] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.122050] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance a5ce5bee-ddc9-4671-8750-6e554051315a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1319.122299] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1319.122452] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1319.353789] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad5017a-4224-4b7f-a990-f784f3526069 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.361144] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fae651e-d83d-47bc-a205-c63c51261502 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.392698] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bddd528-bcb8-4a03-a6d4-be166121b09a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.399391] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374c920a-c18e-4664-ab82-2dd4fb90b5c8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.412135] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.420342] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1319.433545] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1319.433818] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.500s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1319.578592] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824945, 'name': CreateVM_Task, 'duration_secs': 0.235272} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.578761] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1319.579198] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1319.579360] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1319.579674] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1319.579909] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e30f47-5c51-47b8-9e61-c4e53b53ef8b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.584049] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for the task: (returnval){ [ 1319.584049] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]526e17e0-e7df-a43c-ad1a-57ab0c5b29a6" [ 1319.584049] env[67008]: _type = "Task" [ 1319.584049] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.592032] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]526e17e0-e7df-a43c-ad1a-57ab0c5b29a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.094216] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1320.094470] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1320.094676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1321.433718] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1321.856343] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1321.856621] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1321.856819] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.852180] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.855726] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.855882] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1323.856009] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9915}} [ 1323.875184] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.875344] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.875544] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.875770] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.875984] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.876213] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.876434] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.876646] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.876859] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.877081] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1323.877229] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1323.877692] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.877835] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1325.856833] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.896045] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1365.036703] env[67008]: WARNING oslo_vmware.rw_handles [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1365.036703] env[67008]: ERROR oslo_vmware.rw_handles [ 1365.037372] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1365.039496] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 
tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1365.039772] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Copying Virtual Disk [datastore1] vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/9b1c07ac-cc20-497c-b065-d8f21e530a78/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1365.040068] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4f4af28-fc71-4b67-a2ea-b2def18d3d03 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.048007] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1365.048007] env[67008]: value = "task-2824946" [ 1365.048007] env[67008]: _type = "Task" [ 1365.048007] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.057331] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.559202] env[67008]: DEBUG oslo_vmware.exceptions [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1365.559476] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1365.560045] env[67008]: ERROR nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1365.560045] env[67008]: Faults: ['InvalidArgument'] [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] Traceback (most recent call last): [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] yield resources [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self.driver.spawn(context, instance, image_meta, [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self._fetch_image_if_missing(context, vi) [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] image_cache(vi, tmp_image_ds_loc) [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] vm_util.copy_virtual_disk( [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] session._wait_for_task(vmdk_copy_task) [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] return self.wait_for_task(task_ref) [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] return evt.wait() [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] result = hub.switch() [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] return self.greenlet.switch() [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self.f(*self.args, **self.kw) [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] raise exceptions.translate_fault(task_info.error) [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] Faults: ['InvalidArgument'] [ 1365.560045] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] [ 1365.561181] env[67008]: INFO nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Terminating instance [ 1365.561976] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1365.562190] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1365.562428] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01d78a2c-268a-4e1e-8b04-3f8c79b697fd {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.564768] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1365.564957] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1365.565666] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eee29c9-5f0d-4a57-93be-95016829c570 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.573097] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1365.573372] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-781ecf36-aaf8-4893-bdb6-444ae9d3d948 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.574814] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1365.574994] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1365.575642] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-973b537b-e6ee-4921-a97d-cc8fa46cd12f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.581137] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Waiting for the task: (returnval){ [ 1365.581137] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52680eb8-9fbf-5288-bdf7-3abfcd3b27cb" [ 1365.581137] env[67008]: _type = "Task" [ 1365.581137] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.589662] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52680eb8-9fbf-5288-bdf7-3abfcd3b27cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.643932] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1365.644188] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1365.644375] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleting the datastore file [datastore1] 95604dd8-b797-440e-a844-af44609faa61 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.644655] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6414bca1-a0f6-437e-a193-7dd72fd6b5cf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.651276] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1365.651276] env[67008]: value = "task-2824948" [ 1365.651276] env[67008]: _type = "Task" [ 1365.651276] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.658611] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.091610] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1366.091959] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Creating directory with path [datastore1] vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1366.092245] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5520f4ee-bfcb-4d48-bc00-249a637b87e4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.103078] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Created directory with path [datastore1] vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1366.103224] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Fetch image to [datastore1] vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1366.103393] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1366.104132] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53ff79b-1264-4037-b087-8b04621148a9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.110769] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e7c4f5-9ce1-4300-a13f-ad62882a2405 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.119634] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aa5e58-73d5-4f90-ba00-9c89e1a61aa2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.149606] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4aacf3-9a52-42f6-91bb-6b8306ad8223 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.161715] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-785409f3-e1f5-4580-b534-e45c0d346bac {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.163363] env[67008]: DEBUG oslo_vmware.api [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072158} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.163605] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.163780] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1366.163942] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1366.164139] env[67008]: INFO nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Took 0.60 seconds to destroy the instance on the hypervisor. 
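
The CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: submit a vCenter task, then poll its state via wait_for_task/_poll_task until it reports success or raises the translated fault. A minimal sketch of that loop follows; get_task_info and the attributes on the returned info object (state, progress, error) are hypothetical stand-ins for the session internals, not the real oslo.vmware API.

    # Sketch of the poll loop behind the wait_for_task / _poll_task
    # lines above. get_task_info and the info attributes (state,
    # progress, error) are assumed stand-ins, not oslo.vmware's API.
    import time

    class VimFault(Exception):
        """Raised when the vCenter task finishes in an error state."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info  # success is logged with duration_secs
            if info.state == "error":
                # mirrors "raise exceptions.translate_fault(task_info.error)"
                raise VimFault(info.error)
            # matches the recurring "progress is 0%." debug lines
            print(f"Task {task_ref} progress is {info.progress or 0}%.")
            time.sleep(interval)

The real implementation drives this from a looping call on an eventlet greenthread rather than a blocking sleep, which is why the fileType traceback above passes through eventlet's hub.switch() before reaching _poll_task.
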
[ 1366.166362] env[67008]: DEBUG nova.compute.claims [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1366.166530] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1366.166743] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1366.184653] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1366.330802] env[67008]: DEBUG oslo_vmware.rw_handles [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1366.389928] env[67008]: DEBUG oslo_vmware.rw_handles [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1366.390142] env[67008]: DEBUG oslo_vmware.rw_handles [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1366.499538] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985bf09f-bd1d-4088-8fad-f9cb35514a9b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.507095] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7ac8b4-c8d4-413b-a190-2766d6662168 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.537675] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c626ae87-2c46-4caa-951c-d3c5742559dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.545478] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2954c103-1968-44f0-b64a-e51e20509055 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.559921] env[67008]: DEBUG nova.compute.provider_tree [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.568868] env[67008]: DEBUG nova.scheduler.client.report [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1366.582114] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.415s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1366.582660] env[67008]: ERROR nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1366.582660] env[67008]: Faults: ['InvalidArgument'] [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] Traceback (most recent call last): [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 
95604dd8-b797-440e-a844-af44609faa61] self.driver.spawn(context, instance, image_meta, [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self._fetch_image_if_missing(context, vi) [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] image_cache(vi, tmp_image_ds_loc) [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] vm_util.copy_virtual_disk( [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] session._wait_for_task(vmdk_copy_task) [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] return self.wait_for_task(task_ref) [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] return evt.wait() [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] result = hub.switch() [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] return self.greenlet.switch() [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] self.f(*self.args, **self.kw) [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] raise exceptions.translate_fault(task_info.error) [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] Faults: ['InvalidArgument'] [ 1366.582660] env[67008]: ERROR nova.compute.manager [instance: 95604dd8-b797-440e-a844-af44609faa61] [ 1366.583557] env[67008]: DEBUG nova.compute.utils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1366.584739] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Build of instance 95604dd8-b797-440e-a844-af44609faa61 was re-scheduled: A specified parameter was not correct: fileType [ 1366.584739] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1366.585160] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1366.585359] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1366.585541] env[67008]: DEBUG nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1366.585707] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1366.870363] env[67008]: DEBUG nova.network.neutron [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1366.881185] env[67008]: INFO nova.compute.manager [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Took 0.30 seconds to deallocate network for instance. [ 1366.981800] env[67008]: INFO nova.scheduler.client.report [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted allocations for instance 95604dd8-b797-440e-a844-af44609faa61 [ 1367.002732] env[67008]: DEBUG oslo_concurrency.lockutils [None req-248c291f-4326-496c-8b91-605ee0512bc1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "95604dd8-b797-440e-a844-af44609faa61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 676.887s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1367.003905] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "95604dd8-b797-440e-a844-af44609faa61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 479.880s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1367.004144] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "95604dd8-b797-440e-a844-af44609faa61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1367.004355] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "95604dd8-b797-440e-a844-af44609faa61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1367.004521] env[67008]: 
DEBUG oslo_concurrency.lockutils [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "95604dd8-b797-440e-a844-af44609faa61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1367.006458] env[67008]: INFO nova.compute.manager [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Terminating instance [ 1367.008169] env[67008]: DEBUG nova.compute.manager [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1367.008362] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1367.008834] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-281a04bd-6d04-4d49-bfec-82cf7b5e1fe5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.013061] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1367.020331] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eadca7-dfbb-4ab2-a80d-a4bd36e99794 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.050890] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95604dd8-b797-440e-a844-af44609faa61 could not be found. [ 1367.051141] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1367.051319] env[67008]: INFO nova.compute.manager [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 95604dd8-b797-440e-a844-af44609faa61] Took 0.04 seconds to destroy the instance on the hypervisor. 
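
The lock lines around terminate_instance show oslo.concurrency serializing work per instance UUID: the wrapper logs how long the caller waited for the lock and how long it was held. A sketch of the pattern, using the real lockutils API but with the handler body elided:

    # Per-instance locking as in the 'Acquiring lock "<uuid>" by
    # "...do_terminate_instance"' entries. lockutils is the real
    # oslo.concurrency module; the function bodies are elided.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    def terminate_instance(instance_uuid):
        @synchronized(instance_uuid)
        def do_terminate_instance():
            # shutdown on the hypervisor, delete datastore files,
            # deallocate network: the steps logged above
            pass
        do_terminate_instance()

Decorating a nested function this way is what produces the qualified ...terminate_instance..do_terminate_instance name seen in the acquire/release messages.
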
[ 1367.051554] env[67008]: DEBUG oslo.service.loopingcall [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1367.054180] env[67008]: DEBUG nova.compute.manager [-] [instance: 95604dd8-b797-440e-a844-af44609faa61] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1367.054267] env[67008]: DEBUG nova.network.neutron [-] [instance: 95604dd8-b797-440e-a844-af44609faa61] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1367.068498] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1367.068763] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1367.070242] env[67008]: INFO nova.compute.claims [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1367.082257] env[67008]: DEBUG nova.network.neutron [-] [instance: 95604dd8-b797-440e-a844-af44609faa61] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.096995] env[67008]: INFO nova.compute.manager [-] [instance: 95604dd8-b797-440e-a844-af44609faa61] Took 0.04 seconds to deallocate network for instance. [ 1367.185014] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9c8b8219-498d-4960-b51a-f451a61eaa4a tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "95604dd8-b797-440e-a844-af44609faa61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.181s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1367.185860] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "95604dd8-b797-440e-a844-af44609faa61" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 207.163s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1367.186070] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 95604dd8-b797-440e-a844-af44609faa61] During sync_power_state the instance has a pending task (deleting). Skip. 
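
The ComputeManager._* entries interleaved through this window come from oslo.service periodic tasks: the service timer calls run_periodic_tasks, each decorated method runs on its own spacing, and several bail out early when a precondition fails (reclaim_instance_interval <= 0, or the pending delete that makes _sync_power_states skip the instance just above). A runnable sketch of that machinery, with an illustrative option and invented task bodies:

    # The plumbing behind the "Running periodic task ComputeManager._*"
    # lines. The base class and decorator are the real oslo.service
    # API; the option registration and task bodies are illustrative.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

    class ComputeManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_rescued_instances(self, context):
            pass  # would unrescue instances whose rescue timed out

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            if CONF.reclaim_instance_interval <= 0:
                return  # "CONF.reclaim_instance_interval <= 0, skipping..."

    # The service loop drives this on a timer:
    ComputeManager().run_periodic_tasks(context=None)
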
[ 1367.186245] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "95604dd8-b797-440e-a844-af44609faa61" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1367.355841] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0364490d-3d1c-4601-8c29-1cb2049f44d4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.363442] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b51fe3-dbbe-44a5-93e1-1d85d85568ed {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.394610] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2feea3f-9779-4378-b948-1b1aafa132a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.401641] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdaac630-092c-4aba-9528-575179aa0af9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.414462] env[67008]: DEBUG nova.compute.provider_tree [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.422468] env[67008]: DEBUG nova.scheduler.client.report [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1367.437046] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.368s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1367.437532] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1367.475432] env[67008]: DEBUG nova.compute.utils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1367.476828] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1367.476997] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1367.485549] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1367.561021] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1367.572222] env[67008]: DEBUG nova.policy [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '446098f8cb314cb8a999cce83b377778', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73bcd75224ba46ce808f5b052305728c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1367.585828] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1367.586072] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1367.586236] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1367.586417] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1367.586560] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1367.586706] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1367.586910] env[67008]: 
DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1367.587079] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1367.587250] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1367.587410] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1367.587577] env[67008]: DEBUG nova.virt.hardware [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1367.588446] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32b054f-0acd-4861-89e4-d769c8d64e5c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.596512] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f829086c-70eb-4888-8fbb-be916db32988 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.895646] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Successfully created port: fefe3d50-bb32-4dc6-bbf8-0af71996e844 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1368.825140] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Successfully updated port: fefe3d50-bb32-4dc6-bbf8-0af71996e844 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1368.837079] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "refresh_cache-efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1368.838471] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired lock "refresh_cache-efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1368.838471] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1369.079762] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1369.211929] env[67008]: DEBUG nova.compute.manager [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Received event network-vif-plugged-fefe3d50-bb32-4dc6-bbf8-0af71996e844 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1369.212202] env[67008]: DEBUG oslo_concurrency.lockutils [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] Acquiring lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1369.212366] env[67008]: DEBUG oslo_concurrency.lockutils [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1369.212525] env[67008]: DEBUG oslo_concurrency.lockutils [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1369.212684] env[67008]: DEBUG nova.compute.manager [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] No waiting events found dispatching network-vif-plugged-fefe3d50-bb32-4dc6-bbf8-0af71996e844 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1369.212843] env[67008]: WARNING nova.compute.manager [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Received unexpected event network-vif-plugged-fefe3d50-bb32-4dc6-bbf8-0af71996e844 for instance with vm_state building and task_state spawning. 
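The lockutils DEBUG lines above ("Acquiring lock", "acquired ... waited", "released ... held") come from oslo.concurrency wrapping Nova's per-instance event dispatch: the external-event handler takes an "<uuid>-events" lock, looks for a registered waiter for network-vif-plugged, and, finding none while the instance is still spawning, logs the "Received unexpected event" warning. A minimal sketch of that pattern follows; the handler body is hypothetical, and only the lock naming and the oslo_concurrency API mirror the log:

    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name):
        # The synchronized decorator emits the 'acquired ... waited Ns' and
        # 'released ... held Ns' DEBUG lines seen above; the holder name it
        # logs is the decorated function's qualified name (here _pop_event).
        @lockutils.synchronized('%s-events' % instance_uuid)
        def _pop_event():
            # Hypothetical body: look up a waiter registered by the spawning
            # thread. Returning None corresponds to the "No waiting events
            # found dispatching ..." DEBUG line and the WARNING that follows.
            return None

        return _pop_event()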
[ 1369.212999] env[67008]: DEBUG nova.compute.manager [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Received event network-changed-fefe3d50-bb32-4dc6-bbf8-0af71996e844 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1369.213162] env[67008]: DEBUG nova.compute.manager [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Refreshing instance network info cache due to event network-changed-fefe3d50-bb32-4dc6-bbf8-0af71996e844. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1369.213319] env[67008]: DEBUG oslo_concurrency.lockutils [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] Acquiring lock "refresh_cache-efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1369.293692] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Updating instance_info_cache with network_info: [{"id": "fefe3d50-bb32-4dc6-bbf8-0af71996e844", "address": "fa:16:3e:98:cb:92", "network": {"id": "59eb95be-bced-401f-87ad-f8678fd917e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2123186140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73bcd75224ba46ce808f5b052305728c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfefe3d50-bb", "ovs_interfaceid": "fefe3d50-bb32-4dc6-bbf8-0af71996e844", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.307456] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Releasing lock "refresh_cache-efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1369.307739] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance network_info: |[{"id": "fefe3d50-bb32-4dc6-bbf8-0af71996e844", "address": "fa:16:3e:98:cb:92", "network": {"id": "59eb95be-bced-401f-87ad-f8678fd917e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2123186140-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73bcd75224ba46ce808f5b052305728c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfefe3d50-bb", "ovs_interfaceid": "fefe3d50-bb32-4dc6-bbf8-0af71996e844", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1369.308047] env[67008]: DEBUG oslo_concurrency.lockutils [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] Acquired lock "refresh_cache-efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1369.308262] env[67008]: DEBUG nova.network.neutron [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Refreshing network info cache for port fefe3d50-bb32-4dc6-bbf8-0af71996e844 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1369.309217] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:cb:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fefe3d50-bb32-4dc6-bbf8-0af71996e844', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.316459] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating folder: Project (73bcd75224ba46ce808f5b052305728c). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1369.319137] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69779b53-8612-4531-aab3-e54f9209eac8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.329986] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Created folder: Project (73bcd75224ba46ce808f5b052305728c) in parent group-v567993. [ 1369.330184] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating folder: Instances. 
Parent ref: group-v568065. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1369.330406] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33257611-3a75-4b9f-a212-7b9fc4c07533 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.338542] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Created folder: Instances in parent group-v568065. [ 1369.338754] env[67008]: DEBUG oslo.service.loopingcall [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1369.338930] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1369.339137] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1fc0dd8-66f3-43df-9a16-f7f9208a5589 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.359329] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.359329] env[67008]: value = "task-2824951" [ 1369.359329] env[67008]: _type = "Task" [ 1369.359329] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.366416] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824951, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.656926] env[67008]: DEBUG nova.network.neutron [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Updated VIF entry in instance network info cache for port fefe3d50-bb32-4dc6-bbf8-0af71996e844. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1369.657313] env[67008]: DEBUG nova.network.neutron [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Updating instance_info_cache with network_info: [{"id": "fefe3d50-bb32-4dc6-bbf8-0af71996e844", "address": "fa:16:3e:98:cb:92", "network": {"id": "59eb95be-bced-401f-87ad-f8678fd917e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2123186140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73bcd75224ba46ce808f5b052305728c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfefe3d50-bb", "ovs_interfaceid": "fefe3d50-bb32-4dc6-bbf8-0af71996e844", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.667221] env[67008]: DEBUG oslo_concurrency.lockutils [req-b0ffd0f6-8345-46f6-a053-9ef07b44af8c req-3301025b-591e-49da-a11e-b38e884f9264 service nova] Releasing lock "refresh_cache-efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1369.869240] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824951, 'name': CreateVM_Task, 'duration_secs': 0.281491} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.869544] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1369.870076] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1369.870253] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1369.870557] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1369.870882] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df29d939-a37b-424c-a310-498342b91c28 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.875221] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 1369.875221] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c413c4-bf83-8d55-d35e-59334fbf28b8" [ 1369.875221] env[67008]: _type = "Task" [ 1369.875221] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.882652] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52c413c4-bf83-8d55-d35e-59334fbf28b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.385879] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1370.386156] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.386414] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1379.856685] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1379.868458] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1379.868667] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1379.868862] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1379.869032] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1379.870111] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0beba1-42a8-4731-98ab-301cbccb8c27 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.878829] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ab66ab-df34-4684-88c2-4ab0668db7fa {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.892663] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc9fa34-a0b1-481b-9084-12cac36c1b45 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.898593] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d14c80-86ca-4cbe-98b4-45accc402d44 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.928175] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181072MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1379.928305] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1379.928489] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1380.001583] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.001750] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.001901] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002043] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002169] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002285] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002402] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002516] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002629] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.002742] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1380.013186] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.022715] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 98d0e65d-06a0-4487-88ee-014f9c3a483d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.031823] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba7a92a-87ef-462d-adee-d6a1eb044698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.041254] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e7e2b81a-4876-45bf-8fc0-d35c25c8a77c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.049687] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bc882509-a6b8-494c-b334-3c60094ca4ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.058502] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b0d5a68a-8f4b-4959-a855-dbdc14adca6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.067722] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f12d848f-2bed-4838-964b-fd1820160ddd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.076519] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.084560] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance a5ce5bee-ddc9-4671-8750-6e554051315a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1380.084780] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1380.084928] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1380.292839] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7dadd0-8914-4be9-950d-57d21879564c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.302044] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c470930d-c72d-48ca-86dc-b042c193dd25 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.331835] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c21eb2-cb30-42cc-a113-f70581d20a78 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.340075] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c713f85-2580-4bae-9c2c-d6e7d3edac31 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.020204] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.027998] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1381.040372] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1381.040549] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.112s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1383.040605] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.040957] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.041100] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.041256] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.857446] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.857684] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1383.857907] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1383.877641] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.877827] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.877923] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878061] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878196] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878316] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878433] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878546] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878658] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.878915] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1383.879119] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1383.879674] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.879861] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1385.874615] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1387.856901] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.852838] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.089393] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "0c45068a-d333-4247-841d-bf40ebb779da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1399.089843] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "0c45068a-d333-4247-841d-bf40ebb779da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1400.070641] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1400.071229] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1406.445455] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1412.663496] env[67008]: WARNING oslo_vmware.rw_handles [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Error occurred while reading the HTTP 
response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1412.663496] env[67008]: ERROR oslo_vmware.rw_handles [ 1412.664066] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1412.666750] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1412.667159] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Copying Virtual Disk [datastore1] vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/ebb5549b-6097-4029-837f-dfa6fd340c93/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1412.667159] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71eda7fd-f396-4b55-8982-60d0a1c7550f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.677364] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Waiting for the task: (returnval){ [ 1412.677364] env[67008]: value = "task-2824952" [ 1412.677364] env[67008]: _type = "Task" [ 1412.677364] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.685785] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Task: {'id': task-2824952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.187555] env[67008]: DEBUG oslo_vmware.exceptions [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1413.189085] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1413.189085] env[67008]: ERROR nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1413.189085] env[67008]: Faults: ['InvalidArgument'] [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Traceback (most recent call last): [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] yield resources [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self.driver.spawn(context, instance, image_meta, [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self._fetch_image_if_missing(context, vi) [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] image_cache(vi, tmp_image_ds_loc) [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] vm_util.copy_virtual_disk( [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] session._wait_for_task(vmdk_copy_task) [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] return self.wait_for_task(task_ref) [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] return evt.wait() [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] result = hub.switch() [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] return self.greenlet.switch() [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self.f(*self.args, **self.kw) [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] raise exceptions.translate_fault(task_info.error) [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Faults: ['InvalidArgument'] [ 1413.189085] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] [ 1413.189085] env[67008]: INFO nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Terminating instance [ 1413.190442] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1413.190647] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.190882] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e208c74-6757-471f-87fc-4e159c35a9a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.193044] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1413.193239] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1413.193959] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cea260-6f84-4234-b49d-0a4efdf568c4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.200761] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1413.200967] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93032a3f-28b2-4a96-974c-97f1d1bdf8c1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.203034] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.203209] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1413.204124] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a775b794-0f4d-4bd9-8191-adc6987c74f2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.209133] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Waiting for the task: (returnval){ [ 1413.209133] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52667c1a-55fe-18c0-15c0-3924759ff2d8" [ 1413.209133] env[67008]: _type = "Task" [ 1413.209133] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.216178] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52667c1a-55fe-18c0-15c0-3924759ff2d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.613713] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1413.613935] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1413.614132] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Deleting the datastore file [datastore1] 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1413.614399] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87cf0be1-c626-4161-834d-3003ca8ae9cd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.619917] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Waiting for the task: (returnval){ [ 1413.619917] env[67008]: value = "task-2824954" [ 1413.619917] env[67008]: _type = "Task" [ 1413.619917] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.627928] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Task: {'id': task-2824954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.718989] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1413.719305] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Creating directory with path [datastore1] vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.719543] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5811d941-ebc8-418e-b770-e1e07e3d0e7c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.729918] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Created directory with path [datastore1] vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.730147] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Fetch image to [datastore1] vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1413.730316] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1413.730990] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1d1154-13fe-4fed-8a76-77a9dcb68185 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.737232] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c88fd08-a5c4-43f5-9251-e9fd73b6562c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.745908] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc86495d-57e6-4fe0-b684-0deae787bca3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.776557] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdda974-b42a-48e8-a799-e4d5e54e1850 {{(pid=67008) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.782184] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a8d11521-c602-4f17-9648-e7d9126f31e8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.801554] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1413.855901] env[67008]: DEBUG oslo_vmware.rw_handles [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1413.918235] env[67008]: DEBUG oslo_vmware.rw_handles [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1413.918437] env[67008]: DEBUG oslo_vmware.rw_handles [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1414.130614] env[67008]: DEBUG oslo_vmware.api [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Task: {'id': task-2824954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067473} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.130802] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1414.130981] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1414.131213] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1414.131394] env[67008]: INFO nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Took 0.94 seconds to destroy the instance on the hypervisor. [ 1414.133523] env[67008]: DEBUG nova.compute.claims [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1414.133690] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1414.133897] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1414.406577] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab87981-24ca-4b5a-8788-5bf50be02b80 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.414044] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a86fc23-78f7-43fe-9b4e-f91d5a25761f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.443885] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1e5ffd-dc10-47c2-8162-14b7875e49c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.450946] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-69316602-5563-4ad2-a038-5b327c593315 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.463886] env[67008]: DEBUG nova.compute.provider_tree [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.473783] env[67008]: DEBUG nova.scheduler.client.report [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1414.497541] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1414.498112] env[67008]: ERROR nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1414.498112] env[67008]: Faults: ['InvalidArgument'] [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Traceback (most recent call last): [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self.driver.spawn(context, instance, image_meta, [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self._fetch_image_if_missing(context, vi) [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 
01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] image_cache(vi, tmp_image_ds_loc) [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] vm_util.copy_virtual_disk( [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] session._wait_for_task(vmdk_copy_task) [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] return self.wait_for_task(task_ref) [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] return evt.wait() [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] result = hub.switch() [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] return self.greenlet.switch() [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] self.f(*self.args, **self.kw) [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] raise exceptions.translate_fault(task_info.error) [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Faults: ['InvalidArgument'] [ 1414.498112] env[67008]: ERROR nova.compute.manager [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] [ 1414.498847] env[67008]: DEBUG nova.compute.utils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1414.500344] env[67008]: DEBUG 
nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Build of instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d was re-scheduled: A specified parameter was not correct: fileType [ 1414.500344] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1414.500710] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1414.500876] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1414.501054] env[67008]: DEBUG nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1414.501284] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1414.982719] env[67008]: DEBUG nova.network.neutron [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.001028] env[67008]: INFO nova.compute.manager [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Took 0.50 seconds to deallocate network for instance. 
[ 1415.102646] env[67008]: INFO nova.scheduler.client.report [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Deleted allocations for instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d [ 1415.125663] env[67008]: DEBUG oslo_concurrency.lockutils [None req-e9586a83-338d-4104-bea0-5ffa4305c176 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 671.833s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1415.126573] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 475.430s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1415.126785] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Acquiring lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1415.126985] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1415.127164] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1415.129401] env[67008]: INFO nova.compute.manager [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Terminating instance [ 1415.131456] env[67008]: DEBUG nova.compute.manager [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1415.131698] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1415.132207] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ecd5121-e5b7-43d9-a604-56b274d03a16 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.141481] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1037a8-3a2f-4ecf-8e5c-710db4012ee1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.152338] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1415.172190] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d could not be found. [ 1415.173029] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1415.173029] env[67008]: INFO nova.compute.manager [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1415.173029] env[67008]: DEBUG oslo.service.loopingcall [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.173195] env[67008]: DEBUG nova.compute.manager [-] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1415.173226] env[67008]: DEBUG nova.network.neutron [-] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1415.194481] env[67008]: DEBUG nova.network.neutron [-] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.198283] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1415.198503] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1415.199898] env[67008]: INFO nova.compute.claims [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.202872] env[67008]: INFO nova.compute.manager [-] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] Took 0.03 seconds to deallocate network for instance. [ 1415.285657] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f8bb052a-1c17-4192-bc1f-f476cead3aa0 tempest-ServerTagsTestJSON-945932657 tempest-ServerTagsTestJSON-945932657-project-member] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1415.286502] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 255.263s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1415.286687] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1415.286859] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "01bf22df-11c2-4f1f-8083-0ba7fd6c7c6d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1415.459394] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882015b3-4c8a-477e-a294-b8f68839a33f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.466792] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2ed44f-6f96-4d2a-84b9-3fa1a3dfe6c9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.496653] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fd8a07-53c9-40b9-a709-67296eb1bae4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.503851] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39db432c-1ce8-4eb3-9d25-0f0c7dbe1f58 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.516677] env[67008]: DEBUG nova.compute.provider_tree [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.525099] env[67008]: DEBUG nova.scheduler.client.report [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1415.539385] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.341s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1415.540033] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1415.591330] env[67008]: DEBUG nova.compute.utils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1415.592627] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1415.592794] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1415.607635] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1415.677168] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1415.691046] env[67008]: DEBUG nova.policy [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '600b5ac3d9ec4b9393ab3ce5f8011bbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '201882fe0912402db899bfa30cec9388', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1415.702161] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1415.702834] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1415.703165] env[67008]: DEBUG nova.virt.hardware [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1415.703562] env[67008]: DEBUG nova.virt.hardware [None 
req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1415.704591] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5721f7-ea1a-49e4-bfaf-e3ef5ae5b3b2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.713124] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4374ef8-8764-4f5e-a638-701f7280b03a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.075894] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Successfully created port: cc0824e8-99cd-486c-a361-fdea5b1cad57 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1416.740020] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Successfully updated port: cc0824e8-99cd-486c-a361-fdea5b1cad57 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1416.755061] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "refresh_cache-8632f87b-bab8-4df1-a403-a987b0769f8e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1416.755061] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquired lock "refresh_cache-8632f87b-bab8-4df1-a403-a987b0769f8e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1416.755061] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1416.820571] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1417.048163] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Updating instance_info_cache with network_info: [{"id": "cc0824e8-99cd-486c-a361-fdea5b1cad57", "address": "fa:16:3e:71:47:e5", "network": {"id": "8a0800cb-cb69-4930-815b-f35af21803bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1331350078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "201882fe0912402db899bfa30cec9388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0824e8-99", "ovs_interfaceid": "cc0824e8-99cd-486c-a361-fdea5b1cad57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.060947] env[67008]: DEBUG nova.compute.manager [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Received event network-vif-plugged-cc0824e8-99cd-486c-a361-fdea5b1cad57 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1417.061206] env[67008]: DEBUG oslo_concurrency.lockutils [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] Acquiring lock "8632f87b-bab8-4df1-a403-a987b0769f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1417.061420] env[67008]: DEBUG oslo_concurrency.lockutils [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1417.063016] env[67008]: DEBUG oslo_concurrency.lockutils [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1417.063016] env[67008]: DEBUG nova.compute.manager [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] No waiting events found dispatching network-vif-plugged-cc0824e8-99cd-486c-a361-fdea5b1cad57 {{(pid=67008) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1417.063016] env[67008]: WARNING nova.compute.manager [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Received unexpected event network-vif-plugged-cc0824e8-99cd-486c-a361-fdea5b1cad57 for instance with vm_state building and task_state spawning. [ 1417.063016] env[67008]: DEBUG nova.compute.manager [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Received event network-changed-cc0824e8-99cd-486c-a361-fdea5b1cad57 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1417.063016] env[67008]: DEBUG nova.compute.manager [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Refreshing instance network info cache due to event network-changed-cc0824e8-99cd-486c-a361-fdea5b1cad57. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1417.063016] env[67008]: DEBUG oslo_concurrency.lockutils [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] Acquiring lock "refresh_cache-8632f87b-bab8-4df1-a403-a987b0769f8e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1417.064826] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Releasing lock "refresh_cache-8632f87b-bab8-4df1-a403-a987b0769f8e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1417.065259] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance network_info: |[{"id": "cc0824e8-99cd-486c-a361-fdea5b1cad57", "address": "fa:16:3e:71:47:e5", "network": {"id": "8a0800cb-cb69-4930-815b-f35af21803bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1331350078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "201882fe0912402db899bfa30cec9388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0824e8-99", "ovs_interfaceid": "cc0824e8-99cd-486c-a361-fdea5b1cad57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1417.066133] env[67008]: DEBUG oslo_concurrency.lockutils [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] Acquired lock 
"refresh_cache-8632f87b-bab8-4df1-a403-a987b0769f8e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1417.066350] env[67008]: DEBUG nova.network.neutron [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Refreshing network info cache for port cc0824e8-99cd-486c-a361-fdea5b1cad57 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1417.067343] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:47:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9ec24851-7bb6-426b-b28f-f7b246df1713', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc0824e8-99cd-486c-a361-fdea5b1cad57', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.075419] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Creating folder: Project (201882fe0912402db899bfa30cec9388). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1417.076692] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2078c0c3-fc95-494e-a5cd-25736418d7ef {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.090773] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Created folder: Project (201882fe0912402db899bfa30cec9388) in parent group-v567993. [ 1417.090957] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Creating folder: Instances. Parent ref: group-v568068. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1417.091218] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a41c02f7-9941-4176-8769-177c556aa77c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.099414] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Created folder: Instances in parent group-v568068. [ 1417.099649] env[67008]: DEBUG oslo.service.loopingcall [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.099841] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1417.100054] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91cf3d77-9d66-45d7-861f-9efe13020271 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.120217] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.120217] env[67008]: value = "task-2824957" [ 1417.120217] env[67008]: _type = "Task" [ 1417.120217] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.127880] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824957, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.402182] env[67008]: DEBUG nova.network.neutron [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Updated VIF entry in instance network info cache for port cc0824e8-99cd-486c-a361-fdea5b1cad57. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1417.402583] env[67008]: DEBUG nova.network.neutron [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Updating instance_info_cache with network_info: [{"id": "cc0824e8-99cd-486c-a361-fdea5b1cad57", "address": "fa:16:3e:71:47:e5", "network": {"id": "8a0800cb-cb69-4930-815b-f35af21803bc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1331350078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "201882fe0912402db899bfa30cec9388", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9ec24851-7bb6-426b-b28f-f7b246df1713", "external-id": "nsx-vlan-transportzone-359", "segmentation_id": 359, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0824e8-99", "ovs_interfaceid": "cc0824e8-99cd-486c-a361-fdea5b1cad57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.412265] env[67008]: DEBUG oslo_concurrency.lockutils [req-7952130c-5c63-46d1-a01f-af06aea6ce92 req-71875e42-c7d1-41a2-890d-c64b2d3fd7d8 service nova] Releasing lock "refresh_cache-8632f87b-bab8-4df1-a403-a987b0769f8e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1417.629721] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824957, 'name': CreateVM_Task, 'duration_secs': 0.312697} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.629892] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1417.630569] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1417.630734] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1417.631078] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1417.631393] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e13160e8-197b-4174-b44f-97c17eb4bb7f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.635627] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Waiting for the task: (returnval){ [ 1417.635627] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f9f501-359a-dd84-bc57-263b9928d114" [ 1417.635627] env[67008]: _type = "Task" [ 1417.635627] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.643014] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f9f501-359a-dd84-bc57-263b9928d114, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.145869] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1418.146174] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1418.146358] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1424.906050] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "8632f87b-bab8-4df1-a403-a987b0769f8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1436.193862] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1436.194216] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1436.715415] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea46af2f-a6e5-45c2-8635-542a805358c8 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Acquiring lock "db85cf50-33c1-4433-ad83-cd33ee24811b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1436.715415] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea46af2f-a6e5-45c2-8635-542a805358c8 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "db85cf50-33c1-4433-ad83-cd33ee24811b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1439.858064] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.864604] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.864959] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.876275] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.876483] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1441.876650] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1441.876801] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1441.877870] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2a4210-0850-41b7-815f-2274eee46c73 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.886555] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99994c0-5aad-4390-a6e5-0548d2577fdb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.900116] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb20bebb-5ec1-489f-8e81-48f27b7596c1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.905956] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5c2c4e-d43f-4c9e-88e6-d8a8622ecd6d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.934131] env[67008]: DEBUG nova.compute.resource_tracker [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181078MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1441.934279] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.934467] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1442.043861] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.044220] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.045043] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.045043] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.045043] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.045043] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.045043] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.046327] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.046327] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.046327] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1442.074572] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance dba7a92a-87ef-462d-adee-d6a1eb044698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.086061] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance e7e2b81a-4876-45bf-8fc0-d35c25c8a77c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.096769] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bc882509-a6b8-494c-b334-3c60094ca4ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.107091] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance b0d5a68a-8f4b-4959-a855-dbdc14adca6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.120309] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance f12d848f-2bed-4838-964b-fd1820160ddd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.129740] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.139453] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance a5ce5bee-ddc9-4671-8750-6e554051315a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.149990] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.158722] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.167829] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.178516] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance db85cf50-33c1-4433-ad83-cd33ee24811b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1442.178750] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1442.178899] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1442.414274] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119b64e3-51dd-441a-b694-3ba40efabf53 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.421952] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7378826-045b-4fc0-b4d3-bb55454da57f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.450934] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebacd86-cdf7-42d0-a116-9d5b6d21147a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.457855] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e516747-98d0-459c-bcc2-6f371d3bfa0e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.471567] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1442.479692] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1442.493286] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1442.493474] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.559s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1443.485839] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.857229] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.857418] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1443.857548] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1443.880315] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.880489] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.880621] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.880747] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.880871] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.880993] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.881419] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.881597] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.881729] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.881850] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1443.881971] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1444.856586] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.856885] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.237755] env[67008]: DEBUG oslo_concurrency.lockutils [None req-28d3e009-4770-4e30-8dbb-717a0f2c595a tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Acquiring lock "c578d694-4652-4289-81f4-2b00ba20d7fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1445.237992] env[67008]: DEBUG oslo_concurrency.lockutils [None req-28d3e009-4770-4e30-8dbb-717a0f2c595a tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Lock "c578d694-4652-4289-81f4-2b00ba20d7fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1445.856181] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.856382] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1446.843448] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ca7d601c-c4dd-4da9-bc0c-9a283401e57a tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "2d01d2f3-212a-41c1-8b00-ca9f7090f239" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1446.843745] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ca7d601c-c4dd-4da9-bc0c-9a283401e57a tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "2d01d2f3-212a-41c1-8b00-ca9f7090f239" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1447.852646] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.857395] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.857145] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.857410] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1455.869660] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] There are 0 instances to clean {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1455.869909] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.870064] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances with incomplete migration {{(pid=67008) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1462.864193] env[67008]: WARNING oslo_vmware.rw_handles [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1462.864193] env[67008]: ERROR 
oslo_vmware.rw_handles self._conn.getresponse() [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1462.864193] env[67008]: ERROR oslo_vmware.rw_handles [ 1462.864193] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1462.865472] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1462.865699] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Copying Virtual Disk [datastore1] vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/0751fba9-19f6-4033-b9d4-ef7ad0e7c52c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1462.865954] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-132d5795-365e-413e-bcb8-4994251dd718 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.873614] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Waiting for the task: (returnval){ [ 1462.873614] env[67008]: value = "task-2824958" [ 1462.873614] env[67008]: _type = "Task" [ 1462.873614] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.881406] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Task: {'id': task-2824958, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.384199] env[67008]: DEBUG oslo_vmware.exceptions [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1463.384475] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1463.385054] env[67008]: ERROR nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1463.385054] env[67008]: Faults: ['InvalidArgument'] [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Traceback (most recent call last): [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] yield resources [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self.driver.spawn(context, instance, image_meta, [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self._fetch_image_if_missing(context, vi) [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] image_cache(vi, tmp_image_ds_loc) [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] vm_util.copy_virtual_disk( [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", 
line 1423, in copy_virtual_disk [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] session._wait_for_task(vmdk_copy_task) [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] return self.wait_for_task(task_ref) [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] return evt.wait() [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] result = hub.switch() [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] return self.greenlet.switch() [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self.f(*self.args, **self.kw) [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] raise exceptions.translate_fault(task_info.error) [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Faults: ['InvalidArgument'] [ 1463.385054] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] [ 1463.386094] env[67008]: INFO nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Terminating instance [ 1463.386876] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1463.387100] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1463.387342] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52ac5592-0a4b-4108-8e8f-a4db599e5d93 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.389496] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1463.389703] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1463.390423] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3690bdd-62e1-4f0f-b864-33b1fc6fde24 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.397432] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1463.397731] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ef77c53-5fe4-4901-865f-5dd3f8ba350d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.400833] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1463.401133] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1463.402552] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687f1c72-8cf3-4ce0-b6e6-483377df6a63 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.407562] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Waiting for the task: (returnval){ [ 1463.407562] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f943bb-287c-0145-f5ff-384ee75a34d5" [ 1463.407562] env[67008]: _type = "Task" [ 1463.407562] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.415343] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f943bb-287c-0145-f5ff-384ee75a34d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.509964] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1463.510274] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1463.510494] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Deleting the datastore file [datastore1] 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1463.510807] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9876e341-8a6f-4c69-aaca-908a6497be81 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.517125] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Waiting for the task: (returnval){ [ 1463.517125] env[67008]: value = "task-2824960" [ 1463.517125] env[67008]: _type = "Task" [ 1463.517125] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.525652] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Task: {'id': task-2824960, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.918029] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1463.918295] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Creating directory with path [datastore1] vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1463.918372] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64d3a6bf-eb92-4548-9c91-4e50dbe9d754 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.929941] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Created directory with path [datastore1] vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1463.930154] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Fetch image to [datastore1] vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1463.930323] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1463.931077] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f03f5d-2ce4-4bab-9b23-83b7669e3c6d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.937751] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deec122-e932-402f-a1e1-9d72f7859935 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.947311] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55dbe4f6-2c8f-4749-82b1-87c11da58185 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.977786] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5b1b229e-5832-423d-a01c-acf086ef6eeb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.983549] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d02d6128-c79e-4795-9d1a-55140f0e77f5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.002469] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1464.025091] env[67008]: DEBUG oslo_vmware.api [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Task: {'id': task-2824960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070501} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.025340] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1464.025521] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1464.025727] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1464.025926] env[67008]: INFO nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Took 0.64 seconds to destroy the instance on the hypervisor. 
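Editor's note: the DeleteDatastoreFile_Task entries above show oslo.vmware's standard issue-then-poll pattern — a FileManager task is invoked over the SOAP session, and wait_for_task() polls its TaskInfo (the "progress is 0%" lines) until it succeeds or raises a translated fault. A minimal sketch of that pattern follows; the host, credentials, and datastore path are placeholders, not values taken from this log.

    # Sketch only: placeholder vCenter host/credentials and paths.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # FileManager.DeleteDatastoreFile_Task returns a task moref;
    # wait_for_task() polls it (producing the "progress is N%" lines)
    # and raises a translated exception if the task ends in error.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] <instance-uuid>',  # placeholder datastore path
        datacenter=None)  # nova passes a real datacenter moref here
    session.wait_for_task(task)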
[ 1464.027941] env[67008]: DEBUG nova.compute.claims [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1464.028138] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1464.028355] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1464.180484] env[67008]: DEBUG oslo_vmware.rw_handles [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1464.240404] env[67008]: DEBUG oslo_vmware.rw_handles [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1464.240613] env[67008]: DEBUG oslo_vmware.rw_handles [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1464.328096] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e0ce64-bd82-4adb-a1d1-cd7c3a683802 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.335543] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe93af3-717a-41f3-b674-270ffe27ed92 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.365719] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ac8a1f-32d0-4a5d-b746-4f79149cbb8e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.372296] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984d0dd8-3714-43d4-ac68-57b9cc9bb204 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.384894] env[67008]: DEBUG nova.compute.provider_tree [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.392844] env[67008]: DEBUG nova.scheduler.client.report [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1464.407539] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.379s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1464.408107] env[67008]: ERROR nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1464.408107] env[67008]: Faults: ['InvalidArgument'] [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Traceback (most recent call last): [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1464.408107] env[67008]: ERROR nova.compute.manager 
[instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self.driver.spawn(context, instance, image_meta, [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self._fetch_image_if_missing(context, vi) [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] image_cache(vi, tmp_image_ds_loc) [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] vm_util.copy_virtual_disk( [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] session._wait_for_task(vmdk_copy_task) [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] return self.wait_for_task(task_ref) [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] return evt.wait() [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] result = hub.switch() [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] return self.greenlet.switch() [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] self.f(*self.args, **self.kw) [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] raise exceptions.translate_fault(task_info.error) [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Faults: ['InvalidArgument'] [ 1464.408107] env[67008]: ERROR nova.compute.manager [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] [ 1464.408869] env[67008]: DEBUG nova.compute.utils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1464.410209] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Build of instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 was re-scheduled: A specified parameter was not correct: fileType [ 1464.410209] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1464.410573] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1464.410742] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1464.410905] env[67008]: DEBUG nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1464.411079] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1465.111832] env[67008]: DEBUG nova.network.neutron [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.122127] env[67008]: INFO nova.compute.manager [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Took 0.71 seconds to deallocate network for instance. [ 1465.228635] env[67008]: INFO nova.scheduler.client.report [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Deleted allocations for instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 [ 1465.248374] env[67008]: DEBUG oslo_concurrency.lockutils [None req-b115d9cf-f8f1-4da8-bcb6-69decc8e1731 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 649.646s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.249469] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 452.269s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1465.249715] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Acquiring lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1465.249901] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1465.250075] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.252269] env[67008]: INFO nova.compute.manager [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Terminating instance [ 1465.253939] env[67008]: DEBUG nova.compute.manager [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1465.254106] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1465.254609] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69557127-b080-4e48-aacf-4ee2c226835d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.264026] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf6125c-7cbe-4dd7-8eec-7fed6b5e30d4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.274358] env[67008]: DEBUG nova.compute.manager [None req-7d7350a0-506e-4a7c-85e2-344f7a73d957 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: 98d0e65d-06a0-4487-88ee-014f9c3a483d] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.295572] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9 could not be found. [ 1465.296207] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1465.296207] env[67008]: INFO nova.compute.manager [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Took 0.04 seconds to destroy the instance on the hypervisor. 
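Editor's note: the lock choreography in the entries above ("Acquiring lock ... by ...", "acquired ... :: waited Ns", ""released" ... :: held Ns") is oslo.concurrency's lockutils instrumentation around nova's critical sections. A hedged sketch of the decorator pattern behind it; the function body is illustrative only, not nova's actual implementation.

    from oslo_concurrency import lockutils

    # nova.utils.synchronized is lockutils.synchronized_with_prefix('nova-')
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Everything here runs while holding the "compute_resources" lock;
        # lockutils emits the Acquiring/acquired/released lines with the
        # waited/held timings seen in the log.
        pass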
[ 1465.296207] env[67008]: DEBUG oslo.service.loopingcall [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1465.296404] env[67008]: DEBUG nova.compute.manager [-] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1465.296508] env[67008]: DEBUG nova.network.neutron [-] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1465.299067] env[67008]: DEBUG nova.compute.manager [None req-7d7350a0-506e-4a7c-85e2-344f7a73d957 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: 98d0e65d-06a0-4487-88ee-014f9c3a483d] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1465.321164] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7d7350a0-506e-4a7c-85e2-344f7a73d957 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "98d0e65d-06a0-4487-88ee-014f9c3a483d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.007s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.326889] env[67008]: DEBUG nova.network.neutron [-] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.331630] env[67008]: DEBUG nova.compute.manager [None req-aa7d7320-a235-4578-aad0-3c5a4630b486 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] [instance: dba7a92a-87ef-462d-adee-d6a1eb044698] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.334744] env[67008]: INFO nova.compute.manager [-] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] Took 0.04 seconds to deallocate network for instance. [ 1465.356144] env[67008]: DEBUG nova.compute.manager [None req-aa7d7320-a235-4578-aad0-3c5a4630b486 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] [instance: dba7a92a-87ef-462d-adee-d6a1eb044698] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1465.378538] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa7d7320-a235-4578-aad0-3c5a4630b486 tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Lock "dba7a92a-87ef-462d-adee-d6a1eb044698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.062s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.389153] env[67008]: DEBUG nova.compute.manager [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: e7e2b81a-4876-45bf-8fc0-d35c25c8a77c] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.414741] env[67008]: DEBUG nova.compute.manager [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: e7e2b81a-4876-45bf-8fc0-d35c25c8a77c] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1465.432350] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a682b3e1-b67c-4b0d-85c9-5f36cc56ba17 tempest-ServersTestMultiNic-1060338160 tempest-ServersTestMultiNic-1060338160-project-member] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.433131] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 305.410s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1465.433424] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9] During sync_power_state the instance has a pending task (deleting). Skip. [ 1465.433606] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "3a0c8800-ec91-4e6b-a2a2-e01cd4a33be9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.436707] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "e7e2b81a-4876-45bf-8fc0-d35c25c8a77c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.584s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.445670] env[67008]: DEBUG nova.compute.manager [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: bc882509-a6b8-494c-b334-3c60094ca4ed] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.467234] env[67008]: DEBUG nova.compute.manager [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] [instance: bc882509-a6b8-494c-b334-3c60094ca4ed] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1465.487675] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f974a3c6-ef87-497b-aba4-fd1bc2ebd453 tempest-MultipleCreateTestJSON-2129226010 tempest-MultipleCreateTestJSON-2129226010-project-member] Lock "bc882509-a6b8-494c-b334-3c60094ca4ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.609s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.496596] env[67008]: DEBUG nova.compute.manager [None req-d41f6306-c2b4-488e-b565-b741dbd60fa4 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: b0d5a68a-8f4b-4959-a855-dbdc14adca6c] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.519393] env[67008]: DEBUG nova.compute.manager [None req-d41f6306-c2b4-488e-b565-b741dbd60fa4 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: b0d5a68a-8f4b-4959-a855-dbdc14adca6c] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1465.542075] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d41f6306-c2b4-488e-b565-b741dbd60fa4 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "b0d5a68a-8f4b-4959-a855-dbdc14adca6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.017s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.554231] env[67008]: DEBUG nova.compute.manager [None req-dc1c3463-a622-446c-9716-03fff0c8b39a tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] [instance: f12d848f-2bed-4838-964b-fd1820160ddd] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.582234] env[67008]: DEBUG nova.compute.manager [None req-dc1c3463-a622-446c-9716-03fff0c8b39a tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] [instance: f12d848f-2bed-4838-964b-fd1820160ddd] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1465.609936] env[67008]: DEBUG oslo_concurrency.lockutils [None req-dc1c3463-a622-446c-9716-03fff0c8b39a tempest-SecurityGroupsTestJSON-1239576007 tempest-SecurityGroupsTestJSON-1239576007-project-member] Lock "f12d848f-2bed-4838-964b-fd1820160ddd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.935s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.620027] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1465.670274] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1465.670578] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1465.672082] env[67008]: INFO nova.compute.claims [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1465.898835] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffcbbc5-bf55-4097-979d-9f38aba87700 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.906320] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8f4d9a-109b-4d52-8c0d-bf6875a79af7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.935414] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c312d35-ab48-4f6c-996c-7d8476d48d19 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.942355] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb65234-7f94-4947-b31c-c5ebfc97439a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.955197] env[67008]: DEBUG nova.compute.provider_tree [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.963877] env[67008]: DEBUG 
nova.scheduler.client.report [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1465.976682] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1465.977168] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1466.009113] env[67008]: DEBUG nova.compute.utils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.010349] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Not allocating networking since 'none' was specified. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1466.018674] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1466.079745] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1466.104973] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1466.105226] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1466.105382] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1466.105561] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1466.105717] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1466.105847] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1466.106154] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1466.106321] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1466.106489] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 
tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1466.106650] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1466.106818] env[67008]: DEBUG nova.virt.hardware [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1466.107700] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0061335f-a063-4ee7-8463-a8b9e14bb5b0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.115124] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6852ed66-603d-4497-ab2c-f285c7ba9c4a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.129768] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance VIF info [] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1466.135215] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Creating folder: Project (6c36600d87ee4341abe3f4141d503269). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1466.135457] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a2822d1-ecb2-47e2-b1c8-00f6136a6d2b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.144142] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Created folder: Project (6c36600d87ee4341abe3f4141d503269) in parent group-v567993. [ 1466.144320] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Creating folder: Instances. Parent ref: group-v568071. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1466.144515] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-523adbd5-00ff-4c7b-b0c3-35baf78b54c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.153275] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Created folder: Instances in parent group-v568071. 
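Editor's note: unlike the *_Task operations elsewhere in this log, the two "Created folder" entries above come from Folder.CreateFolder, which is synchronous — there is no task moref to poll. A minimal sketch of that call, assuming a session object like the one sketched earlier; this is not nova's exact vm_util helper.

    def create_folder(session, parent_folder_ref, name):
        # Folder.CreateFolder returns the new folder moref directly;
        # vCenter raises a DuplicateName fault if the folder already
        # exists, which callers can catch and treat as success.
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_folder_ref, name=name)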
[ 1466.153485] env[67008]: DEBUG oslo.service.loopingcall [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.153656] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1466.153837] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5b2681f-6fea-43db-b7d6-48ca1f7389c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.168762] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1466.168762] env[67008]: value = "task-2824963" [ 1466.168762] env[67008]: _type = "Task" [ 1466.168762] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.176527] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824963, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.678876] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824963, 'name': CreateVM_Task, 'duration_secs': 0.234311} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.679059] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1466.679473] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1466.679632] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1466.679946] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1466.680208] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de24974e-f9e6-427f-a1cb-cb4d342b427a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.684573] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for the task: (returnval){ 
[ 1466.684573] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520a0757-34fe-285c-df18-9c491a44b700" [ 1466.684573] env[67008]: _type = "Task" [ 1466.684573] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.691631] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520a0757-34fe-285c-df18-9c491a44b700, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.194599] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1467.194599] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1467.194599] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1477.219029] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1477.219326] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1483.461270] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1502.882547] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.858819] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.870165] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1503.870380] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1503.870549] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1503.870703] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1503.871822] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a283df1f-0c54-4d58-b1bb-685b9c21ba6f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.880499] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a602a630-0227-422b-96ab-86eeece54864 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.895773] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75ed083-631b-4d8f-b721-cf9b532ea84c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.902162] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4723479a-8a3c-4f10-995b-231989b0a171 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.930471] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181078MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1503.930618] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1503.930806] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1504.007061] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007061] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007061] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007061] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007329] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007329] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007415] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007518] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007629] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.007741] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1504.017884] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.027350] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.036280] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.045551] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance db85cf50-33c1-4433-ad83-cd33ee24811b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.054180] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance c578d694-4652-4289-81f4-2b00ba20d7fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.063181] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2d01d2f3-212a-41c1-8b00-ca9f7090f239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.072140] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.072360] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1504.072507] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1504.088654] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing inventories for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1504.102840] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Updating ProviderTree inventory for provider ad100a41-192a-4a03-bdd9-0a78ce856705 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1504.103077] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1504.114576] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing 
aggregate associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, aggregates: None {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1504.133406] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing trait associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1504.320534] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ca3e04-0a5b-4217-a6ed-bbc6dda49269 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.328119] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfe28a4-b5d1-4475-aeb4-2d327d38ab58 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.358206] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab6b7c2-5188-4f02-a972-6e2dc3d44632 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.364935] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb51800b-2751-4e67-86e7-9dc120e5b182 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.377629] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.386367] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1504.398997] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1504.399154] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.468s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1505.397261] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
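
Note: the inventory payload logged above is what placement schedules against. The effective pool per resource class is (total - reserved) * allocation_ratio, consumed in multiples of step_size with at most max_unit per single allocation, which is why 48 physical vCPUs at a 4.0 ratio leave this node far from exhausted with only 10 vCPUs allocated. A minimal sketch of that arithmetic, using the inventory dict from the log (the helper name is illustrative, not a nova or placement API):

    # Sketch only: reproduces the capacity arithmetic placement applies to an
    # inventory record; effective_capacity is our name, not nova's.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # (total - reserved) * allocation_ratio is the pool new allocations draw from.
        return {rc: int((fields['total'] - fields['reserved']) * fields['allocation_ratio'])
                for rc, fields in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
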
[ 1505.856837] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.857161] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1505.857312] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1505.878224] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.878458] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.878495] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.878626] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.878749] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.878870] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.878991] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.879127] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.879247] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.879362] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1505.879479] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1505.880020] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.856225] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.856503] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.856608] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1508.852727] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.856370] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.085062] env[67008]: WARNING oslo_vmware.rw_handles [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1510.085062] env[67008]: ERROR oslo_vmware.rw_handles [ 1510.085721] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1510.087309] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1510.087549] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Copying Virtual Disk [datastore1] vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] 
vmware_temp/95509339-44ae-4807-b070-5e26d50a4013/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1510.087837] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9694110a-4718-45b3-8ed0-aca2630db271 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.098007] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Waiting for the task: (returnval){ [ 1510.098007] env[67008]: value = "task-2824964" [ 1510.098007] env[67008]: _type = "Task" [ 1510.098007] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.106486] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Task: {'id': task-2824964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.608396] env[67008]: DEBUG oslo_vmware.exceptions [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1510.608661] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1510.609296] env[67008]: ERROR nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1510.609296] env[67008]: Faults: ['InvalidArgument'] [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Traceback (most recent call last): [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] yield resources [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self.driver.spawn(context, instance, image_meta, [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1510.609296] 
env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self._fetch_image_if_missing(context, vi) [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] image_cache(vi, tmp_image_ds_loc) [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] vm_util.copy_virtual_disk( [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] session._wait_for_task(vmdk_copy_task) [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] return self.wait_for_task(task_ref) [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] return evt.wait() [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] result = hub.switch() [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] return self.greenlet.switch() [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self.f(*self.args, **self.kw) [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] raise exceptions.translate_fault(task_info.error) [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 
83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Faults: ['InvalidArgument'] [ 1510.609296] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] [ 1510.610117] env[67008]: INFO nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Terminating instance [ 1510.611180] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1510.611383] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1510.611616] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d45af616-7302-4070-a49f-5b1573af75c0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.613748] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1510.613937] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1510.614656] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117f40c8-ed45-48ba-b330-ce8265afb6ed {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.621282] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1510.621488] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-273382c9-04d5-487b-b103-2b3a5366e9ca {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.623649] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1510.623818] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1510.624784] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53bb1028-4fef-4786-bad5-bbbbf669fd3e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.629170] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Waiting for the task: (returnval){ [ 1510.629170] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e3771f-b1d2-f14a-db45-7ad9797a8ebd" [ 1510.629170] env[67008]: _type = "Task" [ 1510.629170] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.637475] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e3771f-b1d2-f14a-db45-7ad9797a8ebd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.691659] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1510.691886] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1510.692121] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Deleting the datastore file [datastore1] 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.692392] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02a17dd9-66d3-4a77-9f9f-5db1f5a30eed {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.698234] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Waiting for the task: (returnval){ [ 1510.698234] env[67008]: value = "task-2824966" [ 1510.698234] env[67008]: _type = "Task" [ 1510.698234] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.706097] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Task: {'id': task-2824966, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.140187] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1511.140519] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Creating directory with path [datastore1] vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.140658] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf5c7a98-c34d-484a-aaee-95c0ffdbeaee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.153103] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Created directory with path [datastore1] vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.153327] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Fetch image to [datastore1] vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1511.153500] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1511.154250] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11312cd8-42bc-4960-b9fa-083893a7b3bd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.160768] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3f481f-fed4-4863-b68b-b7bbe70114b8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.170015] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bbd2c8-cd3f-455f-9b43-59e3a1ab99ed {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.202346] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e610d7f-c9ea-487d-a3a8-72afc8cecd3d 
{{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.209188] env[67008]: DEBUG oslo_vmware.api [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Task: {'id': task-2824966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082253} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.210538] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1511.210726] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1511.210896] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1511.211077] env[67008]: INFO nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Took 0.60 seconds to destroy the instance on the hypervisor. 
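
Note: every VirtualDiskManager/FileManager invocation above returns a vCenter Task that nova polls through oslo.vmware (the wait_for_task / _poll_task frames in the log) until the task reports success or an error, at which point the fault is translated into a VimFaultException, as happened to CopyVirtualDisk_Task task-2824964. A schematic, dependency-free sketch of that poll loop, assuming a toy task type rather than the real suds-generated objects:

    import time

    # Schematic only: mirrors the poll-until-terminal-state loop that
    # oslo.vmware implements; FakeTask and the interval are illustrative.
    class FakeTask:
        def __init__(self, states):
            self._states = iter(states)

        def info(self):
            # Each call returns (state, error), like polling task info in vSphere.
            return next(self._states)

    def wait_for_task(task, interval=0.1):
        while True:
            state, error = task.info()
            if state == 'success':
                return
            if state == 'error':
                # oslo.vmware raises a translated fault here (e.g. VimFaultException).
                raise RuntimeError(error)
            time.sleep(interval)

    wait_for_task(FakeTask([('running', None), ('success', None)]))
    try:
        wait_for_task(FakeTask([('running', None),
                                ('error', 'A specified parameter was not correct: fileType')]))
    except RuntimeError as exc:
        print(f'task failed: {exc}')
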
[ 1511.212780] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8aae4cc0-5048-4bc6-911b-20fb12491239 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.214601] env[67008]: DEBUG nova.compute.claims [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1511.214767] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1511.214976] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1511.235263] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1511.385240] env[67008]: DEBUG oslo_vmware.rw_handles [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1511.444126] env[67008]: DEBUG oslo_vmware.rw_handles [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1511.444325] env[67008]: DEBUG oslo_vmware.rw_handles [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1511.502036] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350beb6f-8b6d-48c8-b41e-dccd9297deec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.509711] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8da892-648c-4c2f-8f5e-170ea078cc0c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.540051] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062cf859-7b7d-4920-997a-7c91c3a703f5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.546923] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a9fabf-4a02-40ca-bd05-87c35657179e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.559630] env[67008]: DEBUG nova.compute.provider_tree [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.568232] env[67008]: DEBUG nova.scheduler.client.report [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1511.581297] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.366s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1511.581806] env[67008]: ERROR nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1511.581806] env[67008]: Faults: ['InvalidArgument'] [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Traceback (most recent call last): [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self.driver.spawn(context, instance, image_meta, [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self._fetch_image_if_missing(context, vi) [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] image_cache(vi, tmp_image_ds_loc) [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] vm_util.copy_virtual_disk( [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] session._wait_for_task(vmdk_copy_task) [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] return self.wait_for_task(task_ref) [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] return evt.wait() [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] result = hub.switch() [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] return self.greenlet.switch() [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] self.f(*self.args, **self.kw) [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 
83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] raise exceptions.translate_fault(task_info.error) [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Faults: ['InvalidArgument'] [ 1511.581806] env[67008]: ERROR nova.compute.manager [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] [ 1511.582818] env[67008]: DEBUG nova.compute.utils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1511.583830] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Build of instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 was re-scheduled: A specified parameter was not correct: fileType [ 1511.583830] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1511.584210] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1511.584412] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1511.584587] env[67008]: DEBUG nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1511.584751] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1511.987923] env[67008]: DEBUG nova.network.neutron [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.005382] env[67008]: INFO nova.compute.manager [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Took 0.42 seconds to deallocate network for instance. [ 1512.112026] env[67008]: INFO nova.scheduler.client.report [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Deleted allocations for instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 [ 1512.134772] env[67008]: DEBUG oslo_concurrency.lockutils [None req-89546fa8-8631-4ec0-9258-2efca2fb601b tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 677.952s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1512.135893] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 480.043s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1512.136170] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1512.136421] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1512.136640] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1512.138675] env[67008]: INFO nova.compute.manager [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Terminating instance [ 1512.140527] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquiring lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1512.140780] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Acquired lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1512.140847] env[67008]: DEBUG nova.network.neutron [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1512.146322] env[67008]: DEBUG nova.compute.manager [None req-4b47d448-f62c-4004-8ec8-3842c85d38ed tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: a5ce5bee-ddc9-4671-8750-6e554051315a] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1512.168525] env[67008]: DEBUG nova.network.neutron [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1512.171669] env[67008]: DEBUG nova.compute.manager [None req-4b47d448-f62c-4004-8ec8-3842c85d38ed tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: a5ce5bee-ddc9-4671-8750-6e554051315a] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1512.192110] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4b47d448-f62c-4004-8ec8-3842c85d38ed tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "a5ce5bee-ddc9-4671-8750-6e554051315a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.052s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1512.209950] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1512.259859] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1512.260179] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1512.261569] env[67008]: INFO nova.compute.claims [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1512.394375] env[67008]: DEBUG nova.network.neutron [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.407415] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Releasing lock "refresh_cache-83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1512.407811] env[67008]: DEBUG nova.compute.manager [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1512.408019] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1512.408557] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d69ded23-6931-4195-8379-5b7ee5b9b450 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.422049] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbf7de4-145b-4e53-8c52-34a89e9be197 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.454053] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05 could not be found. [ 1512.454305] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1512.454541] env[67008]: INFO nova.compute.manager [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1512.454725] env[67008]: DEBUG oslo.service.loopingcall [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1512.457201] env[67008]: DEBUG nova.compute.manager [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1512.457304] env[67008]: DEBUG nova.network.neutron [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1512.476904] env[67008]: DEBUG nova.network.neutron [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1512.486087] env[67008]: DEBUG nova.network.neutron [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.497971] env[67008]: INFO nova.compute.manager [-] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] Took 0.04 seconds to deallocate network for instance. [ 1512.548610] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e737cd62-5f13-42e9-8244-076b449bcf8e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.559568] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeddc7d3-f990-4781-bda6-8dc42c20f3af {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.592249] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4f9fb3-2ed9-4ee8-a3d2-e4f6df4f411d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.600169] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f1596d3c-1b7b-4c3c-a6f9-735c9e3800fc tempest-InstanceActionsV221TestJSON-281120923 tempest-InstanceActionsV221TestJSON-281120923-project-member] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.464s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1512.601972] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934a81eb-1907-41a1-b106-b870b29b8f59 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.606050] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 352.582s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1512.606217] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 83db2b63-8c40-4cc2-87b0-5d6f36b3fe05] During sync_power_state the instance has a pending task (deleting). Skip. 
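The "acquired by ... waited 480.043s" / "released ... held 677.952s" pairs above come from oslo.concurrency's lockutils wrapper serializing build and terminate on a single per-instance lock: do_terminate_instance queued behind _locked_do_build_and_run_instance until the build released the lock. A minimal sketch of that pattern, assuming hypothetical do_build/do_terminate stand-ins rather than the actual Nova code paths:

    from oslo_concurrency import lockutils

    # Lock name taken from the log above; Nova keys the lock on the instance UUID.
    INSTANCE_UUID = "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05"

    # lockutils.synchronized emits the DEBUG lines seen above from its
    # wrapper ("inner" in lockutils.py): 'Lock "<name>" acquired by
    # "<target>" :: waited N.NNNs' on entry and '"released" by
    # "<target>" :: held N.NNNs' on exit.
    @lockutils.synchronized(INSTANCE_UUID)
    def do_build():
        pass  # hypothetical long-running build; the log shows it held the lock 677.952s

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate():
        pass  # hypothetical terminate; the log shows it waited 480.043s for the lock
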
[ 1512.606388] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "83db2b63-8c40-4cc2-87b0-5d6f36b3fe05" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1512.617240] env[67008]: DEBUG nova.compute.provider_tree [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1512.625441] env[67008]: DEBUG nova.scheduler.client.report [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1512.636909] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.377s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1512.637486] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1512.674378] env[67008]: DEBUG nova.compute.utils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1512.675603] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Not allocating networking since 'none' was specified. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1512.685875] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Start building block device mappings for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1512.745411] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1512.770194] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1512.770457] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1512.770618] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1512.770798] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1512.770945] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1512.771106] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1512.771317] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1512.771477] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1512.771644] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1512.771807] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1512.772012] env[67008]: DEBUG nova.virt.hardware [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1512.772858] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f0dccd-ba2e-4548-8e16-9bea29165326 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.780799] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58907b51-e08b-42a7-ade2-b58685ab6502 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.794456] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance VIF info [] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1512.799848] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Creating folder: Project (da1a8c2ae1054e378295d18cf0524fef). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1512.800142] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6e91e66-9277-42b6-a8b1-cd4c60adac90 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.809718] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Created folder: Project (da1a8c2ae1054e378295d18cf0524fef) in parent group-v567993. [ 1512.810013] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Creating folder: Instances. Parent ref: group-v568074. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1512.810151] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35c305ea-bd8c-4521-9d54-9b8001a7b9bb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.818271] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Created folder: Instances in parent group-v568074. [ 1512.818492] env[67008]: DEBUG oslo.service.loopingcall [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1512.818668] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1512.818860] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbd71f14-5588-4280-a000-12dbba5702ef {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.835196] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1512.835196] env[67008]: value = "task-2824969" [ 1512.835196] env[67008]: _type = "Task" [ 1512.835196] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.842524] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824969, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.345026] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824969, 'name': CreateVM_Task, 'duration_secs': 0.251157} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.345341] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1513.345631] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1513.345786] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1513.346095] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1513.346332] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a58fd00-06e2-408c-8b81-53dbb85ea1e4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.350554] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for the task: (returnval){ [ 1513.350554] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5258a9b8-74e0-0b79-964c-ab11a27f1da7" [ 1513.350554] env[67008]: _type = "Task" [ 1513.350554] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.357756] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5258a9b8-74e0-0b79-964c-ab11a27f1da7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.851345] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1513.865418] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1513.865657] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1513.865861] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1559.733099] env[67008]: WARNING oslo_vmware.rw_handles [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1559.733099] env[67008]: ERROR oslo_vmware.rw_handles [ 1559.733811] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to 
vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1559.735669] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1559.735962] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Copying Virtual Disk [datastore1] vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/e465c88c-be8b-45ac-81a3-c0905f071236/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1559.736314] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-071a97b2-8db6-4f8c-951e-26df07b7aa91 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.747529] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Waiting for the task: (returnval){ [ 1559.747529] env[67008]: value = "task-2824970" [ 1559.747529] env[67008]: _type = "Task" [ 1559.747529] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.755121] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Task: {'id': task-2824970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.258073] env[67008]: DEBUG oslo_vmware.exceptions [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1560.258354] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1560.258892] env[67008]: ERROR nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1560.258892] env[67008]: Faults: ['InvalidArgument'] [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Traceback (most recent call last): [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] yield resources [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self.driver.spawn(context, instance, image_meta, [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self._fetch_image_if_missing(context, vi) [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] image_cache(vi, tmp_image_ds_loc) [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] vm_util.copy_virtual_disk( [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] session._wait_for_task(vmdk_copy_task) [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] return self.wait_for_task(task_ref) [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] return evt.wait() [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] result = hub.switch() [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] return self.greenlet.switch() [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self.f(*self.args, **self.kw) [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] raise exceptions.translate_fault(task_info.error) [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Faults: ['InvalidArgument'] [ 1560.258892] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] [ 1560.259845] env[67008]: INFO nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Terminating instance [ 1560.260820] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1560.261036] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.261275] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e6d9e56-6400-4e1b-94b2-b3a195cdaef3 
{{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.263397] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1560.263648] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1560.264378] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943f0988-a920-45da-a93d-95b3682e3c45 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.271079] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1560.271260] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0822b41f-f2fd-4b04-8848-f23e0bea9113 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.273399] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.273582] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1560.274548] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55d20a06-9f92-4f91-adc1-6839edaa8222 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.279040] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Waiting for the task: (returnval){ [ 1560.279040] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]521740d0-0584-9255-ea8a-daddd5e95fd7" [ 1560.279040] env[67008]: _type = "Task" [ 1560.279040] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.286607] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]521740d0-0584-9255-ea8a-daddd5e95fd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.350685] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1560.350949] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1560.351182] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Deleting the datastore file [datastore1] 3c10ac79-441a-467c-a3aa-fdb9a9451698 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.351486] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e2e05f4-0d86-4d3e-ac37-1f010c282f5a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.357843] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Waiting for the task: (returnval){ [ 1560.357843] env[67008]: value = "task-2824972" [ 1560.357843] env[67008]: _type = "Task" [ 1560.357843] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.365747] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Task: {'id': task-2824972, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.789610] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1560.789967] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Creating directory with path [datastore1] vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.790122] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38451b5d-09fa-4704-ab52-2a71bedf4116 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.800965] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Created directory with path [datastore1] vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.801255] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Fetch image to [datastore1] vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1560.801446] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1560.802170] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74e5fa2-7c8e-4873-9bbe-7626c8f2eb5f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.808588] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd725f8d-446f-4e74-be81-2a4b9c7b077f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.817345] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde1f945-3ea1-48b2-acbb-5066e2d5d987 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.848400] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9d232c72-4078-458a-b719-073ca671bd8e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.853733] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d9e2d5ed-43d8-4f02-be2e-675d8702d211 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.865750] env[67008]: DEBUG oslo_vmware.api [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Task: {'id': task-2824972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0696} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.865990] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1560.866197] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1560.866397] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1560.866618] env[67008]: INFO nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Took 0.60 seconds to destroy the instance on the hypervisor. 
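The failed spawn traced above follows oslo.vmware's standard task pattern: a *_Task method (here CopyVirtualDisk_Task) is invoked through the session, wait_for_task() polls it (the "_poll_task ... progress is 0%" lines), and any server-side fault is translated into a Python exception, which is how the InvalidArgument "fileType" fault surfaced as VimFaultException. A minimal sketch of that pattern, with placeholder vCenter credentials and datastore paths rather than values from this deployment:

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    # Placeholder connection details, not this deployment's.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    try:
        # Submitting the copy returns a task reference immediately.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore1] vmware_temp/example/example.vmdk')
        # Polls the task's server-side state until it completes or faults,
        # raising the translated exception on failure.
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        # For the failure above, e.fault_list was ['InvalidArgument'].
        print('copy failed: %s' % e)
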
[ 1560.868817] env[67008]: DEBUG nova.compute.claims [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1560.868982] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1560.869211] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1560.874437] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1560.975096] env[67008]: DEBUG oslo_vmware.rw_handles [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1561.033670] env[67008]: DEBUG oslo_vmware.rw_handles [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1561.033859] env[67008]: DEBUG oslo_vmware.rw_handles [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1561.144979] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb9846f-6927-4722-a51a-f2cad58e21a9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.152692] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8259e77-c2e2-4a76-9051-304f2adcc89a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.181516] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0b8b36-08e3-4511-8322-0d32c2015d07 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.188724] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbba086-e83a-4406-b7a7-62b5c1f49639 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.202610] env[67008]: DEBUG nova.compute.provider_tree [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.211231] env[67008]: DEBUG nova.scheduler.client.report [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1561.226693] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.357s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1561.227270] env[67008]: ERROR nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1561.227270] env[67008]: Faults: ['InvalidArgument'] [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Traceback (most recent call last): [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1561.227270] env[67008]: ERROR 
nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self.driver.spawn(context, instance, image_meta, [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self._fetch_image_if_missing(context, vi) [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] image_cache(vi, tmp_image_ds_loc) [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] vm_util.copy_virtual_disk( [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] session._wait_for_task(vmdk_copy_task) [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] return self.wait_for_task(task_ref) [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] return evt.wait() [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] result = hub.switch() [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] return self.greenlet.switch() [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] self.f(*self.args, **self.kw) [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] raise exceptions.translate_fault(task_info.error) [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Faults: ['InvalidArgument'] [ 1561.227270] env[67008]: ERROR nova.compute.manager [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] [ 1561.228091] env[67008]: DEBUG nova.compute.utils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1561.230106] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Build of instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 was re-scheduled: A specified parameter was not correct: fileType [ 1561.230106] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1561.230545] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1561.230729] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1561.230979] env[67008]: DEBUG nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1561.231185] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1561.664303] env[67008]: DEBUG nova.network.neutron [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.675035] env[67008]: INFO nova.compute.manager [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Took 0.44 seconds to deallocate network for instance. [ 1561.775221] env[67008]: INFO nova.scheduler.client.report [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Deleted allocations for instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 [ 1561.795399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-95ede50e-d4f1-4203-8d0f-794e86c13333 tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 669.142s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1561.796530] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 473.016s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1561.796757] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Acquiring lock "3c10ac79-441a-467c-a3aa-fdb9a9451698-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1561.796963] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1561.797146] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1561.799125] env[67008]: INFO nova.compute.manager [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Terminating instance [ 1561.800867] env[67008]: DEBUG nova.compute.manager [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1561.801110] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1561.801565] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b335eb9-54f1-4179-9eff-bf3ccc865918 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.811146] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3992dd30-e0eb-48c4-a3a7-a4df642a1411 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.821672] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1561.842718] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3c10ac79-441a-467c-a3aa-fdb9a9451698 could not be found. [ 1561.842906] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1561.843088] env[67008]: INFO nova.compute.manager [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1561.843494] env[67008]: DEBUG oslo.service.loopingcall [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.843590] env[67008]: DEBUG nova.compute.manager [-] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1561.843718] env[67008]: DEBUG nova.network.neutron [-] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1561.869685] env[67008]: DEBUG nova.network.neutron [-] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.871304] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1561.871545] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1561.873083] env[67008]: INFO nova.compute.claims [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1561.877154] env[67008]: INFO nova.compute.manager [-] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] Took 0.03 seconds to deallocate network for instance. [ 1561.961379] env[67008]: DEBUG oslo_concurrency.lockutils [None req-c6d23b2e-42ef-4c7f-abdf-3ce80052fc3e tempest-ServersTestManualDisk-1873214009 tempest-ServersTestManualDisk-1873214009-project-member] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1561.962504] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 401.939s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1561.962698] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3c10ac79-441a-467c-a3aa-fdb9a9451698] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1561.962870] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "3c10ac79-441a-467c-a3aa-fdb9a9451698" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1562.097639] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57ec654-e567-47f3-8d6e-cdeff3711dbc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.106781] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1abdfcf-38d1-4a43-882e-459d949eb39c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.135665] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6f7d2e-4d23-482d-b9f6-d28029106fe0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.142461] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d55fdc9-11cc-43da-8b23-8f0bd9c5816c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.154957] env[67008]: DEBUG nova.compute.provider_tree [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.163620] env[67008]: DEBUG nova.scheduler.client.report [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1562.176349] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1562.176795] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1562.209325] env[67008]: DEBUG nova.compute.utils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1562.210857] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1562.210857] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1562.220682] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1562.276991] env[67008]: DEBUG nova.policy [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '00a4714cefa8439591f1c04b9633d2d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50523e61dfe8446d9dd72ab8e8fbd19c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1562.285240] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1562.309416] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1562.309660] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1562.309815] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1562.309996] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1562.310157] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1562.310306] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1562.310512] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1562.310673] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1562.310840] env[67008]: DEBUG nova.virt.hardware [None 
req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1562.311015] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1562.311193] env[67008]: DEBUG nova.virt.hardware [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1562.312050] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e42566-0cbe-4a5e-8929-8dc6a805d380 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.321541] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2dc1976-16c7-4c16-9254-8d8d28b686a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.719494] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Successfully created port: 07b7b6af-ba31-4bc8-8aec-e99fc744faeb {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1563.359193] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Successfully updated port: 07b7b6af-ba31-4bc8-8aec-e99fc744faeb {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1563.369875] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "refresh_cache-7868f806-e64f-4964-9e1e-bcb8d29e685f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1563.370380] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired lock "refresh_cache-7868f806-e64f-4964-9e1e-bcb8d29e685f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1563.370380] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1563.408814] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 
tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1563.682104] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Updating instance_info_cache with network_info: [{"id": "07b7b6af-ba31-4bc8-8aec-e99fc744faeb", "address": "fa:16:3e:ee:3f:e7", "network": {"id": "324f2653-1ff3-4547-8dee-fbdad1adff80", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-464980593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50523e61dfe8446d9dd72ab8e8fbd19c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "113aa98d-90ca-43bc-a534-8908d1ec7d15", "external-id": "nsx-vlan-transportzone-186", "segmentation_id": 186, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b7b6af-ba", "ovs_interfaceid": "07b7b6af-ba31-4bc8-8aec-e99fc744faeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.694338] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Releasing lock "refresh_cache-7868f806-e64f-4964-9e1e-bcb8d29e685f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1563.694646] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance network_info: |[{"id": "07b7b6af-ba31-4bc8-8aec-e99fc744faeb", "address": "fa:16:3e:ee:3f:e7", "network": {"id": "324f2653-1ff3-4547-8dee-fbdad1adff80", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-464980593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50523e61dfe8446d9dd72ab8e8fbd19c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "113aa98d-90ca-43bc-a534-8908d1ec7d15", "external-id": "nsx-vlan-transportzone-186", "segmentation_id": 186, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b7b6af-ba", "ovs_interfaceid": "07b7b6af-ba31-4bc8-8aec-e99fc744faeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1563.695059] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:3f:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '113aa98d-90ca-43bc-a534-8908d1ec7d15', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07b7b6af-ba31-4bc8-8aec-e99fc744faeb', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1563.702620] env[67008]: DEBUG oslo.service.loopingcall [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1563.703091] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1563.703334] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a492ae1-0096-4b71-98c0-69ac7b1d0be5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.723846] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1563.723846] env[67008]: value = "task-2824973" [ 1563.723846] env[67008]: _type = "Task" [ 1563.723846] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.731555] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824973, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.733881] env[67008]: DEBUG nova.compute.manager [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Received event network-vif-plugged-07b7b6af-ba31-4bc8-8aec-e99fc744faeb {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1563.734051] env[67008]: DEBUG oslo_concurrency.lockutils [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] Acquiring lock "7868f806-e64f-4964-9e1e-bcb8d29e685f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1563.734260] env[67008]: DEBUG oslo_concurrency.lockutils [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1563.734423] env[67008]: DEBUG oslo_concurrency.lockutils [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1563.734583] env[67008]: DEBUG nova.compute.manager [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] No waiting events found dispatching network-vif-plugged-07b7b6af-ba31-4bc8-8aec-e99fc744faeb {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1563.734741] env[67008]: WARNING nova.compute.manager [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Received unexpected event network-vif-plugged-07b7b6af-ba31-4bc8-8aec-e99fc744faeb for instance with vm_state building and task_state spawning. [ 1563.734895] env[67008]: DEBUG nova.compute.manager [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Received event network-changed-07b7b6af-ba31-4bc8-8aec-e99fc744faeb {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1563.735057] env[67008]: DEBUG nova.compute.manager [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Refreshing instance network info cache due to event network-changed-07b7b6af-ba31-4bc8-8aec-e99fc744faeb. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1563.735237] env[67008]: DEBUG oslo_concurrency.lockutils [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] Acquiring lock "refresh_cache-7868f806-e64f-4964-9e1e-bcb8d29e685f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1563.735402] env[67008]: DEBUG oslo_concurrency.lockutils [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] Acquired lock "refresh_cache-7868f806-e64f-4964-9e1e-bcb8d29e685f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1563.735576] env[67008]: DEBUG nova.network.neutron [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Refreshing network info cache for port 07b7b6af-ba31-4bc8-8aec-e99fc744faeb {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1563.856241] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.856502] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.868704] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1563.868945] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1563.869163] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1563.869324] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1563.870500] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ac154c-e0bc-4497-b8d6-0d3a436237f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.879314] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55467403-2300-447a-afb0-23d904702da1 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.894306] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4848de-79d9-497d-ad0d-1c615bc1a872 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.901520] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a823ec0-8ebb-428f-a4af-51ee926adc12 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.938312] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181068MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1563.938570] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1563.938667] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1564.018135] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.018301] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.018494] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.018554] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.018655] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.018820] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.018879] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.019031] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.019109] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.019217] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1564.030503] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1564.041363] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance db85cf50-33c1-4433-ad83-cd33ee24811b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1564.054265] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance c578d694-4652-4289-81f4-2b00ba20d7fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1564.064177] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2d01d2f3-212a-41c1-8b00-ca9f7090f239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1564.073272] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1564.073520] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1564.073673] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1564.236044] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824973, 'name': CreateVM_Task} progress is 25%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.260643] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349df67b-7cf9-4616-86e9-4376d4a5f036 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.268169] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de5cfb7-2fa8-4266-ab66-4f50a92736b3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.297482] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c307f1-1f9c-4235-8cac-564bc2772bac {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.300589] env[67008]: DEBUG nova.network.neutron [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Updated VIF entry in instance network info cache for port 07b7b6af-ba31-4bc8-8aec-e99fc744faeb. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1564.300899] env[67008]: DEBUG nova.network.neutron [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Updating instance_info_cache with network_info: [{"id": "07b7b6af-ba31-4bc8-8aec-e99fc744faeb", "address": "fa:16:3e:ee:3f:e7", "network": {"id": "324f2653-1ff3-4547-8dee-fbdad1adff80", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-464980593-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50523e61dfe8446d9dd72ab8e8fbd19c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "113aa98d-90ca-43bc-a534-8908d1ec7d15", "external-id": "nsx-vlan-transportzone-186", "segmentation_id": 186, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07b7b6af-ba", "ovs_interfaceid": "07b7b6af-ba31-4bc8-8aec-e99fc744faeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.306768] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84fd6a1-c4a0-4689-aa7f-32c50656f152 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.312246] env[67008]: DEBUG oslo_concurrency.lockutils [req-f5ecd6b8-6363-410c-bcaa-4894d0cb105d req-eeed8057-271c-41a5-8a1a-c29b872276a5 service nova] Releasing lock "refresh_cache-7868f806-e64f-4964-9e1e-bcb8d29e685f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1564.321993] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory 
has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.330807] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1564.342727] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1564.342901] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.404s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1564.734222] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824973, 'name': CreateVM_Task} progress is 25%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.235518] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824973, 'name': CreateVM_Task, 'duration_secs': 1.253829} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.235678] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1565.236316] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1565.236484] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1565.236815] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1565.237087] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2262a24-1c68-4d58-9ed0-ad51e4ce8e3f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.241622] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1565.241622] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e111aa-22ad-36b7-97e7-e5761c4ce081" [ 1565.241622] env[67008]: _type = "Task" [ 1565.241622] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.249201] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e111aa-22ad-36b7-97e7-e5761c4ce081, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.752476] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1565.752817] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1565.752998] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1566.343505] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.856454] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.856867] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1567.856867] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1567.876014] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876200] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876303] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876434] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876599] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876729] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876847] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.876964] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.877093] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.877210] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1567.877325] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1567.877810] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.877992] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.878170] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.878302] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1568.873584] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.857065] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1594.872669] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "0c45068a-d333-4247-841d-bf40ebb779da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1595.387896] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1604.722435] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1604.722792] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1609.752982] env[67008]: WARNING oslo_vmware.rw_handles [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1609.752982] env[67008]: ERROR oslo_vmware.rw_handles [ 1609.753581] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1609.755414] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1609.755659] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Copying Virtual Disk [datastore1] vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/632f241d-ab7f-4dbb-a1b1-be98b4467ccc/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1609.755949] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3cc2591-c25d-41df-a31f-f92c140a84ee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.763937] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 
tempest-ServerActionsTestOtherB-1943301535-project-member] Waiting for the task: (returnval){ [ 1609.763937] env[67008]: value = "task-2824974" [ 1609.763937] env[67008]: _type = "Task" [ 1609.763937] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.771880] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Task: {'id': task-2824974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.275054] env[67008]: DEBUG oslo_vmware.exceptions [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1610.275188] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1610.275675] env[67008]: ERROR nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1610.275675] env[67008]: Faults: ['InvalidArgument'] [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Traceback (most recent call last): [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] yield resources [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self.driver.spawn(context, instance, image_meta, [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self._fetch_image_if_missing(context, vi) [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] image_cache(vi, tmp_image_ds_loc) [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] vm_util.copy_virtual_disk( [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] session._wait_for_task(vmdk_copy_task) [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] return self.wait_for_task(task_ref) [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] return evt.wait() [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] result = hub.switch() [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] return self.greenlet.switch() [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self.f(*self.args, **self.kw) [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] raise exceptions.translate_fault(task_info.error) [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Faults: ['InvalidArgument'] [ 1610.275675] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] [ 1610.276570] env[67008]: INFO nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Terminating instance [ 1610.277511] env[67008]: 
DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1610.277719] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1610.278348] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1610.278534] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1610.278764] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-962639e7-555d-4e88-aa87-f8bab7f4b50c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.280984] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c83845-9f0d-43d6-8663-e8589b064adc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.287784] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1610.287985] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3d30b3d-fb68-4230-a891-c6a67fff70c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.289989] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1610.290182] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1610.291117] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6007c031-5549-4bff-9c50-557fb15ff6a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.295641] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1610.295641] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52420d17-cca1-c449-ffe5-6514701b83f6" [ 1610.295641] env[67008]: _type = "Task" [ 1610.295641] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.302687] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52420d17-cca1-c449-ffe5-6514701b83f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.626351] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1610.626584] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1610.626734] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Deleting the datastore file [datastore1] 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1610.627092] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2446fb9f-6a4e-4558-8dea-9546502b5167 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.634112] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Waiting for the task: (returnval){ [ 1610.634112] env[67008]: value = "task-2824976" [ 1610.634112] env[67008]: _type = "Task" [ 1610.634112] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.641535] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Task: {'id': task-2824976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.805903] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1610.806322] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating directory with path [datastore1] vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1610.806393] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1069a573-d99a-48cf-ac99-8e03e6374ab2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.817553] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Created directory with path [datastore1] vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1610.817757] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Fetch image to [datastore1] vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1610.817907] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1610.818713] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59adf9f9-5c92-43a5-bbd9-5e0d35a758b0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.825148] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a8b186-8362-4b01-aa1e-8cef7bfc592f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.834074] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b832586-4ab4-4f45-b0c0-a4bc84ec7e53 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.864837] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56aa8ba4-6ff4-4469-8a1f-13d8fbb837f3 {{(pid=67008) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.869930] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2eca1226-5c2c-4902-830a-ec993a43ca94 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.888809] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1610.937340] env[67008]: DEBUG oslo_vmware.rw_handles [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1610.997622] env[67008]: DEBUG oslo_vmware.rw_handles [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1610.997916] env[67008]: DEBUG oslo_vmware.rw_handles [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1611.145425] env[67008]: DEBUG oslo_vmware.api [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Task: {'id': task-2824976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084998} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.145732] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1611.146090] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1611.146090] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1611.146251] env[67008]: INFO nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Took 0.87 seconds to destroy the instance on the hypervisor. [ 1611.148387] env[67008]: DEBUG nova.compute.claims [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1611.148592] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1611.148812] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1611.367354] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47993f4e-2206-491c-97df-8d36b985f6a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.374924] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0239e98-cd90-43a6-bd91-2e2340259233 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.403398] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adf9830-2547-400f-95cb-a0b4a3928302 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.409912] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a855b66-0d67-4c7d-9311-0b6ef784d431 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.423123] env[67008]: DEBUG nova.compute.provider_tree [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1611.431197] env[67008]: DEBUG nova.scheduler.client.report [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1611.444756] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1611.445303] env[67008]: ERROR nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1611.445303] env[67008]: Faults: ['InvalidArgument'] [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Traceback (most recent call last): [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self.driver.spawn(context, instance, image_meta, [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self._fetch_image_if_missing(context, vi) [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] image_cache(vi, tmp_image_ds_loc) [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] vm_util.copy_virtual_disk( [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] session._wait_for_task(vmdk_copy_task) [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] return self.wait_for_task(task_ref) [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] return evt.wait() [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] result = hub.switch() [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] return self.greenlet.switch() [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] self.f(*self.args, **self.kw) [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] raise exceptions.translate_fault(task_info.error) [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Faults: ['InvalidArgument'] [ 1611.445303] env[67008]: ERROR nova.compute.manager [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] [ 1611.446186] env[67008]: DEBUG nova.compute.utils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] VimFaultException {{(pid=67008) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1611.447345] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Build of instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c was re-scheduled: A specified parameter was not correct: fileType [ 1611.447345] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1611.447710] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1611.447877] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1611.448058] env[67008]: DEBUG nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1611.448223] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1611.809312] env[67008]: DEBUG nova.network.neutron [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.822023] env[67008]: INFO nova.compute.manager [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Took 0.37 seconds to deallocate network for instance. 
[ 1611.920653] env[67008]: INFO nova.scheduler.client.report [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Deleted allocations for instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c [ 1611.949248] env[67008]: DEBUG oslo_concurrency.lockutils [None req-3c7abc93-ace6-4ee2-8c0c-eb0e90bae31e tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 607.083s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1611.950483] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 451.927s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1611.950672] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] During sync_power_state the instance has a pending task (spawning). Skip. [ 1611.950844] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1611.951484] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 411.086s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1611.951698] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Acquiring lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1611.951898] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1611.952132] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1611.954232] env[67008]: INFO nova.compute.manager [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Terminating instance [ 1611.955915] env[67008]: DEBUG nova.compute.manager [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1611.956128] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1611.956389] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8aa6d5b-2e69-4856-9459-e599921f8fde {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.966011] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a3ba49-0f03-4d79-b77f-fc4a8ebf1dc4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.977121] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1611.997316] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c could not be found. [ 1611.997519] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1611.997693] env[67008]: INFO nova.compute.manager [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1611.997936] env[67008]: DEBUG oslo.service.loopingcall [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1611.998179] env[67008]: DEBUG nova.compute.manager [-] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1611.998275] env[67008]: DEBUG nova.network.neutron [-] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1612.021499] env[67008]: DEBUG nova.network.neutron [-] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.025560] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1612.025790] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1612.027207] env[67008]: INFO nova.compute.claims [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1612.030239] env[67008]: INFO nova.compute.manager [-] [instance: 81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c] Took 0.03 seconds to deallocate network for instance. 
[ 1612.117872] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f40b5d87-7649-48a9-8258-fecd3ad0e37c tempest-ServerActionsTestOtherB-1943301535 tempest-ServerActionsTestOtherB-1943301535-project-member] Lock "81c79b24-9380-4b9d-a29d-9cd8d5ac4b5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1612.255013] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98237476-eeb4-43fa-ab6b-d974ce713501 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.263879] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8e9d70-18d8-4ed8-97d9-24b2ade28978 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.293714] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfdf17b-50ca-4fb9-8cc9-27e871a2ef1b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.301065] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad13ad8-ca5f-4520-8bc8-b607807aed37 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.314047] env[67008]: DEBUG nova.compute.provider_tree [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.322562] env[67008]: DEBUG nova.scheduler.client.report [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1612.335489] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1612.335937] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1612.370019] env[67008]: DEBUG nova.compute.utils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1612.370400] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1612.370572] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1612.380072] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1612.444927] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1612.466808] env[67008]: DEBUG nova.policy [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b4ccaa5617a4e8fba19d15f4fef6d01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '373c9d91c0614a3fbc1053354600c8d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1612.472255] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1612.472483] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1612.472629] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.472923] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1612.472983] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.473156] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1612.473361] env[67008]: DEBUG nova.virt.hardware [None 
req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1612.473519] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1612.473678] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1612.473859] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1612.474045] env[67008]: DEBUG nova.virt.hardware [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1612.474879] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2552afc3-2736-404e-8a45-cff12d8de983 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.482893] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afcc45a-e192-426d-aed2-b80778512f54 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.760449] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Successfully created port: a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1613.523286] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Successfully updated port: a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1613.535800] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "refresh_cache-9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1613.535945] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 
tempest-ServersNegativeTestJSON-26290777-project-member] Acquired lock "refresh_cache-9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1613.536145] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1613.630903] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1613.803210] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Updating instance_info_cache with network_info: [{"id": "a25ba074-2c1d-454c-bed8-9f48a9bdd1c4", "address": "fa:16:3e:3e:ce:ff", "network": {"id": "f1d4cc42-cda4-41be-ba85-ed5a8d4f7825", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-383197113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373c9d91c0614a3fbc1053354600c8d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa25ba074-2c", "ovs_interfaceid": "a25ba074-2c1d-454c-bed8-9f48a9bdd1c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.816734] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Releasing lock "refresh_cache-9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1613.817042] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance network_info: |[{"id": "a25ba074-2c1d-454c-bed8-9f48a9bdd1c4", "address": "fa:16:3e:3e:ce:ff", "network": {"id": "f1d4cc42-cda4-41be-ba85-ed5a8d4f7825", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-383197113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373c9d91c0614a3fbc1053354600c8d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa25ba074-2c", "ovs_interfaceid": "a25ba074-2c1d-454c-bed8-9f48a9bdd1c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1613.817443] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:ce:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a25ba074-2c1d-454c-bed8-9f48a9bdd1c4', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1613.825473] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Creating folder: Project (373c9d91c0614a3fbc1053354600c8d9). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1613.826331] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6564b4b3-30cb-4089-911e-c4de51afec90 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.836527] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Created folder: Project (373c9d91c0614a3fbc1053354600c8d9) in parent group-v567993. [ 1613.836713] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Creating folder: Instances. Parent ref: group-v568078. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1613.836934] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2deb382-e00b-49de-92fe-9c6bfcf0883b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.844910] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Created folder: Instances in parent group-v568078. 
[ 1613.845157] env[67008]: DEBUG oslo.service.loopingcall [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.845339] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1613.845525] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-808332e0-57c5-4f1c-b950-98d1fe1a06c9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.863767] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1613.863767] env[67008]: value = "task-2824979" [ 1613.863767] env[67008]: _type = "Task" [ 1613.863767] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.870811] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824979, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.876510] env[67008]: DEBUG nova.compute.manager [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Received event network-vif-plugged-a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1613.876731] env[67008]: DEBUG oslo_concurrency.lockutils [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] Acquiring lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1613.876939] env[67008]: DEBUG oslo_concurrency.lockutils [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1613.877163] env[67008]: DEBUG oslo_concurrency.lockutils [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1613.877283] env[67008]: DEBUG nova.compute.manager [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] No waiting events found dispatching network-vif-plugged-a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1613.877426] env[67008]: WARNING nova.compute.manager [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] 
Received unexpected event network-vif-plugged-a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 for instance with vm_state building and task_state spawning. [ 1613.877599] env[67008]: DEBUG nova.compute.manager [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Received event network-changed-a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1613.877720] env[67008]: DEBUG nova.compute.manager [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Refreshing instance network info cache due to event network-changed-a25ba074-2c1d-454c-bed8-9f48a9bdd1c4. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1613.877918] env[67008]: DEBUG oslo_concurrency.lockutils [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] Acquiring lock "refresh_cache-9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1613.878081] env[67008]: DEBUG oslo_concurrency.lockutils [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] Acquired lock "refresh_cache-9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1613.878239] env[67008]: DEBUG nova.network.neutron [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Refreshing network info cache for port a25ba074-2c1d-454c-bed8-9f48a9bdd1c4 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1614.118642] env[67008]: DEBUG nova.network.neutron [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Updated VIF entry in instance network info cache for port a25ba074-2c1d-454c-bed8-9f48a9bdd1c4. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1614.118993] env[67008]: DEBUG nova.network.neutron [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Updating instance_info_cache with network_info: [{"id": "a25ba074-2c1d-454c-bed8-9f48a9bdd1c4", "address": "fa:16:3e:3e:ce:ff", "network": {"id": "f1d4cc42-cda4-41be-ba85-ed5a8d4f7825", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-383197113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373c9d91c0614a3fbc1053354600c8d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa25ba074-2c", "ovs_interfaceid": "a25ba074-2c1d-454c-bed8-9f48a9bdd1c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.128197] env[67008]: DEBUG oslo_concurrency.lockutils [req-cd21f979-b4c1-4a0d-b715-9fb374e3020d req-ccd14aa5-cfb1-4128-952a-94acaaf5a053 service nova] Releasing lock "refresh_cache-9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1614.374626] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824979, 'name': CreateVM_Task, 'duration_secs': 0.298077} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.374807] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1614.375508] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1614.375678] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1614.376008] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1614.376390] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e852f28-c38b-48ef-abb1-edf8caf354cb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.380742] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Waiting for the task: (returnval){ [ 1614.380742] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52516e36-1cf3-4001-81aa-4fd348261cf2" [ 1614.380742] env[67008]: _type = "Task" [ 1614.380742] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.388595] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52516e36-1cf3-4001-81aa-4fd348261cf2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.890339] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1614.890690] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.890835] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1623.856549] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.857491] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.882929] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1624.883213] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1624.883388] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1624.883546] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1624.885141] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7053d6-a802-4250-bc3b-69e8c50a0666 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.895239] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f84adbb-cea2-4a71-afd6-034944d51fd4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.913137] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99e78a0-d428-4ffc-9679-49d96bff3d35 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.920083] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dedf95b5-0347-4919-963a-63f249f2f803 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.951102] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181079MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1624.951287] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1624.951472] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1625.057720] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.057966] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 1f040a29-196b-4a5c-808f-53dc56f3facc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.058177] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.058358] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.058561] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.059884] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.059884] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.059884] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.059884] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.059884] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1625.075141] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance db85cf50-33c1-4433-ad83-cd33ee24811b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1625.092696] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance c578d694-4652-4289-81f4-2b00ba20d7fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1625.104767] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 2d01d2f3-212a-41c1-8b00-ca9f7090f239 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1625.117175] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1625.130712] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1625.130964] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1625.131128] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1625.375518] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e8bed1-1dab-4c27-8bdd-23e8138de9bc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.386450] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f570cc1-6535-4410-a2b9-91ecf4f7f339 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.435139] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae8473d-08cf-4a21-b7e9-8a6b6686eee2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.445628] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9f669b-ef05-4aa6-b87a-8aeb7f179da5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.464732] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: 
ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1625.476290] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1625.493968] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1625.494203] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.543s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1625.509216] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1625.509452] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1627.493777] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.856757] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.857621] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.857997] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1628.857997] env[67008]: DEBUG nova.compute.manager [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1628.880365] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.880518] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.880696] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.880880] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881027] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881205] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881360] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881491] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881626] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881772] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1628.881915] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1629.856267] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.856529] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.856700] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.856846] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1631.642598] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1631.857364] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.852661] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1643.321449] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "5ecc1376-aab4-4b17-8746-39bed51edbba" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1643.321813] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1657.742172] env[67008]: WARNING oslo_vmware.rw_handles [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 
1657.742172] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1657.742172] env[67008]: ERROR oslo_vmware.rw_handles [ 1657.742775] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1657.744656] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1657.744913] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Copying Virtual Disk [datastore1] vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/c3c9de9e-7443-4386-a549-1298064c2cc5/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1657.745266] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17bcc9eb-1c9e-4bf2-bcd0-9b32324ef6c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.755372] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1657.755372] env[67008]: value = "task-2824980" [ 1657.755372] env[67008]: _type = "Task" [ 1657.755372] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.763321] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': task-2824980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.266435] env[67008]: DEBUG oslo_vmware.exceptions [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1658.266723] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1658.267270] env[67008]: ERROR nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1658.267270] env[67008]: Faults: ['InvalidArgument'] [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Traceback (most recent call last): [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] yield resources [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self.driver.spawn(context, instance, image_meta, [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self._fetch_image_if_missing(context, vi) [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] image_cache(vi, tmp_image_ds_loc) [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] vm_util.copy_virtual_disk( [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] session._wait_for_task(vmdk_copy_task) [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] return self.wait_for_task(task_ref) [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] return evt.wait() [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] result = hub.switch() [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] return self.greenlet.switch() [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self.f(*self.args, **self.kw) [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] raise exceptions.translate_fault(task_info.error) [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Faults: ['InvalidArgument'] [ 1658.267270] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] [ 1658.268114] env[67008]: INFO nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Terminating instance [ 1658.269148] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1658.269358] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1658.269598] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-529312e0-0811-4386-8df9-7aa1f6ea2cf0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.271775] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1658.271960] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1658.272683] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c3d6cd-f80c-4f6d-b2ea-6690637479b9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.279431] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1658.279633] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a89094b7-e087-46ba-b2d7-c9c11b4a5626 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.281701] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1658.281870] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1658.282842] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7ed1f15-87e3-4eb4-b92d-bc7317db6ae7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.287579] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 1658.287579] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ab0196-da37-d7d7-4e37-3a48b6424ea4" [ 1658.287579] env[67008]: _type = "Task" [ 1658.287579] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.295189] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ab0196-da37-d7d7-4e37-3a48b6424ea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.346683] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1658.346863] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1658.347033] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Deleting the datastore file [datastore1] 94e8ddc5-d43c-49d5-93c6-f08081ed7643 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1658.347303] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd317d52-33ce-4a93-ab9d-509903219a8b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.353291] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1658.353291] env[67008]: value = "task-2824982" [ 1658.353291] env[67008]: _type = "Task" [ 1658.353291] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.360751] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': task-2824982, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.797703] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1658.798056] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Creating directory with path [datastore1] vmware_temp/ff54e79f-0b1b-43f3-b4ba-ed4de3c57092/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1658.798214] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a68816a-fe21-4376-970b-2204a8ef8f7c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.808977] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Created directory with path [datastore1] vmware_temp/ff54e79f-0b1b-43f3-b4ba-ed4de3c57092/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1658.809175] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Fetch image to [datastore1] vmware_temp/ff54e79f-0b1b-43f3-b4ba-ed4de3c57092/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1658.809345] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/ff54e79f-0b1b-43f3-b4ba-ed4de3c57092/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1658.810073] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1c20c6-6c1a-4005-be00-4daa578121e1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.816313] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70038fc3-8c94-49dd-b6f4-aa78f071947c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.825162] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78cb9fa-4e64-4443-b9d0-7602324567c8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.858589] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a9be95d1-2f3e-4e45-9048-2fe01c432e5c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.865183] env[67008]: DEBUG oslo_vmware.api [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': task-2824982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071443} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.866532] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.866724] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1658.866893] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1658.867076] env[67008]: INFO nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Took 0.60 seconds to destroy the instance on the hypervisor. 
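Editor's note: the destroy path above follows the same rhythm as every vSphere call in this log: issue the API call, get a Task back, then poll it ("Waiting for the task ... to complete", "progress is 0%", "completed successfully ... duration_secs") until it reaches a terminal state. Below is a minimal, illustrative sketch of that poll loop; it is not the oslo.vmware implementation, and TaskInfo/fetch_info are invented stand-ins for the real task objects.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    state: str                        # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0                 # 0-100, as in the "progress is 0%" lines
    error: Optional[Exception] = None

def wait_for_task(fetch_info: Callable[[str], TaskInfo], task_id: str,
                  poll_interval: float = 0.5, timeout: float = 300.0) -> TaskInfo:
    """Poll task_id until it reaches a terminal state or we give up."""
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_info(task_id)    # one server round trip per iteration
        if info.state == 'success':
            return info
        if info.state == 'error':
            # The real loop translates the fault payload into a Python
            # exception ("raise exceptions.translate_fault(task_info.error)"),
            # which is exactly what surfaces in this log as
            # VimFaultException: A specified parameter was not correct: fileType.
            raise info.error or RuntimeError(f'task {task_id} failed')
        if time.monotonic() > deadline:
            raise TimeoutError(f'task {task_id} still {info.state!r} '
                               f'after {timeout}s')
        time.sleep(poll_interval)     # the real code yields via an eventlet
                                      # looping call instead of sleeping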
[ 1658.868810] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4ad20064-47ca-4404-8186-fa52bfa4d272 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.870654] env[67008]: DEBUG nova.compute.claims [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1658.870830] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1658.871051] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1658.892619] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1659.084667] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b48322a-bfcf-478e-b218-9202f2a9cd42 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.092422] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c62e68e-e7cf-4647-8d83-3110673b2a2e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.121629] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1659.123195] env[67008]: ERROR nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. 
[ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = getattr(controller, method)(*args, **kwargs) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._get(image_id) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] resp, body = self.http_client.get(url, headers=header) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.request(url, 'GET', **kwargs) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._handle_response(resp) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exc.from_response(resp, resp.content) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred: [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] yield resources [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self.driver.spawn(context, instance, image_meta, [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._fetch_image_if_missing(context, vi) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image_fetch(context, vi, tmp_image_ds_loc) [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] images.fetch_image( [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1659.123195] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] metadata = IMAGE_API.get(context, image_ref) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return session.show(context, image_id, [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] _reraise_translated_image_exception(image_id) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File 
"/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise new_exc.with_traceback(exc_trace) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = getattr(controller, method)(*args, **kwargs) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._get(image_id) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] resp, body = self.http_client.get(url, headers=header) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.request(url, 'GET', **kwargs) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._handle_response(resp) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exc.from_response(resp, resp.content) [ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] nova.exception.ImageNotAuthorized: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. 
[ 1659.124210] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1659.124210] env[67008]: INFO nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Terminating instance [ 1659.125441] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cf1590-328d-4a81-ab82-cc38d6053fa3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.127868] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1659.128094] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1659.128721] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1659.128907] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1659.129470] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0f09b16-bfda-41b8-b856-990f4e568362 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.132391] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c30d1c-7884-4152-9d82-81416dc0413a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.139946] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695a73ad-fc17-4c49-851e-ea912679c3a7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.146015] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1659.146218] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
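Editor's note: the Acquired/Releasing lock lines around the cached image vmdk show how concurrent spawns coordinate on the shared image cache: whoever needs image ae01aa56-93e6-47e6-accd-8c8a802d92bd takes a lock named after the cache entry, re-checks whether the cached file already exists, and only then downloads. A rough sketch of that pattern follows; is_cached and fetch_to_cache are hypothetical helpers, and the real code uses oslo.concurrency's named locks rather than this in-process table.

import threading
from collections import defaultdict

_LOCKS = defaultdict(threading.Lock)    # one lock per cache entry (sketch only)

def ensure_image_cached(image_id, is_cached, fetch_to_cache):
    # The lock name mirrors the log's Acquired/Releasing lines.
    lock_name = ('[datastore1] devstack-image-cache_base/'
                 f'{image_id}/{image_id}.vmdk')
    with _LOCKS[lock_name]:             # serialize all fetchers of this image
        if not is_cached(image_id):     # re-check under the lock: a peer may
            fetch_to_cache(image_id)    # have finished while we waited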
[ 1659.147589] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-729173d6-c016-4826-8a44-a7a10f793247 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.158662] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1659.159101] env[67008]: DEBUG nova.compute.provider_tree [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1659.160423] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a7ef97d-0fec-4dee-9ee9-7394cd586fbd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.163583] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1659.163583] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52acc6b1-776a-7c8a-f18c-ffb8be8d01c3" [ 1659.163583] env[67008]: _type = "Task" [ 1659.163583] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.170268] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52acc6b1-776a-7c8a-f18c-ffb8be8d01c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.171539] env[67008]: DEBUG nova.scheduler.client.report [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1659.186529] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.315s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.187079] env[67008]: ERROR nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1659.187079] env[67008]: Faults: ['InvalidArgument'] [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Traceback (most recent call last): [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self.driver.spawn(context, instance, image_meta, [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self._fetch_image_if_missing(context, vi) [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] image_cache(vi, tmp_image_ds_loc) [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] vm_util.copy_virtual_disk( [ 1659.187079] env[67008]: ERROR nova.compute.manager 
[instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] session._wait_for_task(vmdk_copy_task) [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] return self.wait_for_task(task_ref) [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] return evt.wait() [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] result = hub.switch() [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] return self.greenlet.switch() [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] self.f(*self.args, **self.kw) [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] raise exceptions.translate_fault(task_info.error) [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Faults: ['InvalidArgument'] [ 1659.187079] env[67008]: ERROR nova.compute.manager [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] [ 1659.188138] env[67008]: DEBUG nova.compute.utils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1659.189326] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Build of instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 was re-scheduled: A specified parameter was not correct: fileType [ 1659.189326] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance 
/opt/stack/nova/nova/compute/manager.py:2454}} [ 1659.189705] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1659.189875] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1659.190055] env[67008]: DEBUG nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1659.190241] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1659.225577] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1659.225803] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1659.225983] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Deleting the datastore file [datastore1] 1f040a29-196b-4a5c-808f-53dc56f3facc {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.226260] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0d53d59-b548-4b7d-bf02-af23759f3262 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.232534] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for the task: (returnval){ [ 1659.232534] env[67008]: value = "task-2824984" [ 1659.232534] env[67008]: _type = "Task" [ 1659.232534] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.240313] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': task-2824984, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.517724] env[67008]: DEBUG nova.network.neutron [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.531576] env[67008]: INFO nova.compute.manager [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Took 0.34 seconds to deallocate network for instance. [ 1659.635970] env[67008]: INFO nova.scheduler.client.report [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Deleted allocations for instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 [ 1659.659205] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5f770c27-465d-41a5-acb7-5c5b418f842d tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 653.779s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.660024] env[67008]: DEBUG oslo_concurrency.lockutils [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 458.031s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1659.660148] env[67008]: DEBUG oslo_concurrency.lockutils [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1659.660350] env[67008]: DEBUG oslo_concurrency.lockutils [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1659.661584] env[67008]: DEBUG oslo_concurrency.lockutils [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.662796] env[67008]: INFO nova.compute.manager [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Terminating instance [ 1659.664555] env[67008]: DEBUG nova.compute.manager [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1659.664939] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1659.665462] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7844968-539e-4dbb-8838-0611d76e1142 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.671685] env[67008]: DEBUG nova.compute.manager [None req-ea46af2f-a6e5-45c2-8635-542a805358c8 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: db85cf50-33c1-4433-ad83-cd33ee24811b] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1659.681735] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1659.681977] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1659.682262] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-657e29f9-4a18-4388-b9b2-be88ec8ed540 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.686566] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd99cad5-2013-4427-a7bd-73bc23d2bd90 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.698392] env[67008]: DEBUG nova.compute.manager [None req-ea46af2f-a6e5-45c2-8635-542a805358c8 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] [instance: db85cf50-33c1-4433-ad83-cd33ee24811b] Instance disappeared before build. 
[ 1659.719365] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 94e8ddc5-d43c-49d5-93c6-f08081ed7643 could not be found. [ 1659.719560] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1659.719762] env[67008]: INFO nova.compute.manager [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1659.720011] env[67008]: DEBUG oslo.service.loopingcall [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.722792] env[67008]: DEBUG nova.compute.manager [-] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1659.722898] env[67008]: DEBUG nova.network.neutron [-] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1659.724657] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1659.724833] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Fetch image to [datastore1] vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1659.724996] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1659.726141] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23638a0-e117-4108-ab4a-f24802775a50 {{(pid=67008) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.733888] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea46af2f-a6e5-45c2-8635-542a805358c8 tempest-ServerDiskConfigTestJSON-160270024 tempest-ServerDiskConfigTestJSON-160270024-project-member] Lock "db85cf50-33c1-4433-ad83-cd33ee24811b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.018s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.737382] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9083657b-4966-424c-8070-c1b4d59fe001 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.745569] env[67008]: DEBUG oslo_vmware.api [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Task: {'id': task-2824984, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073641} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.751135] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1659.751321] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1659.751487] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1659.751652] env[67008]: INFO nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1659.755076] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99843b09-841a-427a-bc90-073c17924c95 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.759259] env[67008]: DEBUG nova.compute.manager [None req-28d3e009-4770-4e30-8dbb-717a0f2c595a tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] [instance: c578d694-4652-4289-81f4-2b00ba20d7fb] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1659.762317] env[67008]: DEBUG nova.compute.claims [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1659.762497] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1659.762707] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1659.765722] env[67008]: DEBUG nova.network.neutron [-] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.796231] env[67008]: INFO nova.compute.manager [-] [instance: 94e8ddc5-d43c-49d5-93c6-f08081ed7643] Took 0.07 seconds to deallocate network for instance. [ 1659.796590] env[67008]: DEBUG nova.compute.manager [None req-28d3e009-4770-4e30-8dbb-717a0f2c595a tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] [instance: c578d694-4652-4289-81f4-2b00ba20d7fb] Instance disappeared before build. 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1659.800954] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196b1a6e-313a-494d-bfa1-100cad5984ec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.814126] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a6d09496-4d00-4a23-8ac5-e0282b88cb06 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.824224] env[67008]: DEBUG oslo_concurrency.lockutils [None req-28d3e009-4770-4e30-8dbb-717a0f2c595a tempest-AttachVolumeNegativeTest-709865151 tempest-AttachVolumeNegativeTest-709865151-project-member] Lock "c578d694-4652-4289-81f4-2b00ba20d7fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.586s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.835387] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1659.837899] env[67008]: DEBUG nova.compute.manager [None req-ca7d601c-c4dd-4da9-bc0c-9a283401e57a tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 2d01d2f3-212a-41c1-8b00-ca9f7090f239] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1659.863207] env[67008]: DEBUG nova.compute.manager [None req-ca7d601c-c4dd-4da9-bc0c-9a283401e57a tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 2d01d2f3-212a-41c1-8b00-ca9f7090f239] Instance disappeared before build. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1659.882600] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ca7d601c-c4dd-4da9-bc0c-9a283401e57a tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "2d01d2f3-212a-41c1-8b00-ca9f7090f239" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.039s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.894560] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1659.900866] env[67008]: DEBUG oslo_vmware.rw_handles [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1659.962424] env[67008]: DEBUG oslo_concurrency.lockutils [None req-019f4c33-2ec3-47d5-b2ab-1e6507bfb5b2 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "94e8ddc5-d43c-49d5-93c6-f08081ed7643" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.302s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1659.966453] env[67008]: DEBUG oslo_vmware.rw_handles [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1659.966453] env[67008]: DEBUG oslo_vmware.rw_handles [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1659.984307] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1660.031034] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5df3958-5a26-455f-b033-a4543032571b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.039208] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6196bc4d-5f18-4555-abb9-49b2e64ed097 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.068698] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7389406b-7b12-499a-b598-90834e046636 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.075269] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9627dd5e-692d-4143-9ba8-49048ed02cfe {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.087960] env[67008]: DEBUG nova.compute.provider_tree [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.096649] env[67008]: DEBUG nova.scheduler.client.report [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Inventory has not changed 
for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1660.109580] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.347s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1660.110289] env[67008]: ERROR nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = getattr(controller, method)(*args, **kwargs) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._get(image_id) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] resp, body = self.http_client.get(url, headers=header) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.request(url, 'GET', **kwargs) [ 
1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._handle_response(resp) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exc.from_response(resp, resp.content) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred: [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self.driver.spawn(context, instance, image_meta, [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._fetch_image_if_missing(context, vi) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image_fetch(context, vi, tmp_image_ds_loc) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] images.fetch_image( [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 
1f040a29-196b-4a5c-808f-53dc56f3facc] metadata = IMAGE_API.get(context, image_ref) [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1660.110289] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return session.show(context, image_id, [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] _reraise_translated_image_exception(image_id) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise new_exc.with_traceback(exc_trace) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = getattr(controller, method)(*args, **kwargs) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._get(image_id) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] resp, body = self.http_client.get(url, headers=header) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.request(url, 'GET', **kwargs) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._handle_response(resp) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 
1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exc.from_response(resp, resp.content) [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] nova.exception.ImageNotAuthorized: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. [ 1660.111334] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.111334] env[67008]: DEBUG nova.compute.utils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1660.111963] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.128s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1660.113283] env[67008]: INFO nova.compute.claims [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1660.116838] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Build of instance 1f040a29-196b-4a5c-808f-53dc56f3facc was re-scheduled: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1660.117301] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1660.117485] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1660.117643] env[67008]: DEBUG nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1660.117802] env[67008]: DEBUG nova.network.neutron [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1660.268915] env[67008]: DEBUG neutronclient.v2_0.client [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67008) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1660.270186] env[67008]: ERROR nova.compute.manager [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = getattr(controller, method)(*args, **kwargs) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._get(image_id) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] resp, body = self.http_client.get(url, headers=header) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 
1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.request(url, 'GET', **kwargs) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._handle_response(resp) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exc.from_response(resp, resp.content) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred: [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self.driver.spawn(context, instance, image_meta, [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._fetch_image_if_missing(context, vi) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image_fetch(context, vi, tmp_image_ds_loc) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] 
images.fetch_image( [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] metadata = IMAGE_API.get(context, image_ref) [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1660.270186] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return session.show(context, image_id, [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] _reraise_translated_image_exception(image_id) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise new_exc.with_traceback(exc_trace) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = getattr(controller, method)(*args, **kwargs) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._get(image_id) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] resp, body = self.http_client.get(url, headers=header) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.request(url, 'GET', **kwargs) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self._handle_response(resp) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exc.from_response(resp, resp.content) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] nova.exception.ImageNotAuthorized: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred: [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._build_and_run_instance(context, instance, image, [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exception.RescheduledException( [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] nova.exception.RescheduledException: Build of instance 1f040a29-196b-4a5c-808f-53dc56f3facc was re-scheduled: Not authorized for image ae01aa56-93e6-47e6-accd-8c8a802d92bd. 
[ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred: [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] exception_handler_v20(status_code, error_body) [ 1660.271272] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise client_exc(message=error_message, [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Neutron server returns request_ids: ['req-03b13134-79c2-4f84-9ed8-68841057934a'] [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred: [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last): [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._deallocate_network(context, instance, requested_networks) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self.network_api.deallocate_for_instance( [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] data = neutron.list_ports(**search_opts) [ 
1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.list('ports', self.ports_path, retrieve_all, [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] for r in self._pagination(collection, path, **params): [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] res = self.get(path, params=params) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.retry_request("GET", action, body=body, [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.do_request(method, action, body=body, [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._handle_fault_response(status_code, replybody, resp) [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exception.Unauthorized() [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] nova.exception.Unauthorized: Not authorized. [ 1660.272400] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] [ 1660.314090] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2be647-b328-45b9-a9d7-e36b088a4905 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.322116] env[67008]: INFO nova.scheduler.client.report [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Deleted allocations for instance 1f040a29-196b-4a5c-808f-53dc56f3facc [ 1660.331230] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a4142f-f32c-49f3-a8bb-ae5396b32651 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.361654] env[67008]: DEBUG oslo_concurrency.lockutils [None req-addf8bc6-20be-45cb-b340-1f5d0d240baf tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 622.608s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1660.362391] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e90541a-2730-4821-96ac-6670f2c52bd3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.365885] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 425.886s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1660.366018] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Acquiring lock "1f040a29-196b-4a5c-808f-53dc56f3facc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1660.366230] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1660.366397] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1660.368247] env[67008]: INFO nova.compute.manager [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Terminating instance [ 1660.370059] env[67008]: DEBUG nova.compute.manager [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1660.370262] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1660.370709] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b08569dd-20b0-400c-8c3c-ce10a19f463c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.376316] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14d962f-51c6-404b-b7eb-8370780c42a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.380208] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1660.386536] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a81b061-ba59-4687-8164-98dde4389e4d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.404304] env[67008]: DEBUG nova.compute.provider_tree [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.411309] env[67008]: DEBUG nova.scheduler.client.report [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1660.426503] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f040a29-196b-4a5c-808f-53dc56f3facc could not be found. [ 1660.426695] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1660.426868] env[67008]: INFO nova.compute.manager [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1660.427114] env[67008]: DEBUG oslo.service.loopingcall [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1660.427801] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1660.428251] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1660.432129] env[67008]: DEBUG nova.compute.manager [-] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1660.432237] env[67008]: DEBUG nova.network.neutron [-] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1660.445066] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1660.445306] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1660.446817] env[67008]: INFO nova.compute.claims [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1660.466566] env[67008]: DEBUG nova.compute.utils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1660.467746] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Allocating IP information in the background. 
{{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1660.467912] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1660.478437] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1660.537430] env[67008]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67008) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1660.537430] env[67008]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-98a78696-5729-4039-b185-141423d29650'] [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1660.537896] env[67008]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) 
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall result = f(*args, **kwargs)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall self._deallocate_network(
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance(
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all,
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params):
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall res = self.get(path, params=params)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body,
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body,
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp)
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1660.537896] env[67008]: ERROR oslo.service.loopingcall
[ 1660.539347] env[67008]: ERROR nova.compute.manager [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1660.547158] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}}
[ 1660.567580] env[67008]: ERROR nova.compute.manager [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
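The frames at nova/network/neutron.py line 196 and line 212 that recur through this traceback belong to a proxy that routes every neutronclient call through a single wrapper and translates a 401 from Neutron into a Nova configuration error. Below is a minimal sketch of that pattern; the exception classes and the ClientProxy name are simplified stand-ins for illustration, not the actual Nova source.

    # Sketch of the exception-translating proxy visible in the traceback
    # ('ret = obj(*args, **kwargs)' at neutron.py:196, the re-raise at
    # neutron.py:212). All names here are illustrative stand-ins.

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the Nova exception raised in the log above."""

    class ClientProxy:
        """Wrap a Neutron client so every callable attribute is funneled
        through one wrapper function."""

        def __init__(self, client, is_admin):
            self._client = client
            self._is_admin = is_admin

        def __getattr__(self, name):
            attr = getattr(self._client, name)
            if not callable(attr):
                return attr

            def wrapper(*args, **kwargs):
                try:
                    return attr(*args, **kwargs)
                except Unauthorized:
                    if self._is_admin:
                        # An admin token should never get a 401; treat it
                        # as a deployment/credential configuration problem,
                        # as the traceback above does.
                        raise NeutronAdminCredentialConfigurationInvalid()
                    raise

            return wrapper

In the real code the wrapping is arranged so that the client's internal calls re-enter the wrapper as well, which is why the neutron.py:196 frame repeats once for each step of the list_ports / list / _pagination / get / retry_request / do_request chain above.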
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last):
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs)
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] exception_handler_v20(status_code, error_body)
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise client_exc(message=error_message,
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Neutron server returns request_ids: ['req-98a78696-5729-4039-b185-141423d29650']
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc]
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] During handling of the above exception, another exception occurred:
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc]
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Traceback (most recent call last):
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._delete_instance(context, instance, bdms)
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._shutdown_instance(context, instance, bdms)
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._try_deallocate_network(context, instance, requested_networks)
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] with excutils.save_and_reraise_exception():
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self.force_reraise()
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise self.value
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] _deallocate_network_with_retries()
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return evt.wait()
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = hub.switch()
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.greenlet.switch()
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = func(*self.args, **self.kw)
[ 1660.567580] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] result = f(*args, **kwargs)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._deallocate_network(
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self.network_api.deallocate_for_instance(
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] data = neutron.list_ports(**search_opts)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.list('ports', self.ports_path, retrieve_all,
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] for r in self._pagination(collection, path, **params):
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] res = self.get(path, params=params)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.retry_request("GET", action, body=body,
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] return self.do_request(method, action, body=body,
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] ret = obj(*args, **kwargs)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] self._handle_fault_response(status_code, replybody, resp)
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1660.568533] env[67008]: ERROR nova.compute.manager [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc]
[ 1660.571472] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1660.571742] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1660.571847] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1660.572032] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1660.572183] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1660.572329] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1660.572525] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1660.572683] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1660.572845] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1660.573015] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1660.573263] env[67008]: DEBUG nova.virt.hardware [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1660.574366] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bfd72c-8cfe-482c-80c9-ee0ae0fa7985 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1660.580400] env[67008]: DEBUG nova.policy [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5343dcbda10b4898b07cfc371ea9e355', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2ffa090d43c4facaec9fcb96575a5f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1660.590159] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b838717-18b0-4336-bda0-1b700f1af7e9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1660.610813] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Lock "1f040a29-196b-4a5c-808f-53dc56f3facc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.245s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1660.674030] env[67008]: INFO nova.compute.manager [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] [instance: 1f040a29-196b-4a5c-808f-53dc56f3facc] Successfully reverted task state from None on failure for instance.
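The nova.virt.hardware lines above show the topology selection inputs and outcome: neither the m1.nano flavor nor the image expresses CPU topology limits or preferences (0 means no constraint, so the maximums default to 65536), and for a single vCPU the only topology whose sockets x cores x threads product covers it is 1:1:1. The sketch below reproduces that enumeration under the simplifying assumption that a topology is valid exactly when the product equals the vCPU count; the class name mirrors the log's VirtCPUTopology repr, but the code is illustrative, not Nova's implementation.

    from dataclasses import dataclass
    from itertools import product

    @dataclass(frozen=True)
    class VirtCPUTopology:
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536):
        """Enumerate sockets/cores/threads triples that exactly cover
        `vcpus`, capped by the (defaulted) limits; a simplified version
        of the _get_possible_cpu_topologies step logged above."""
        def limit(m):
            return range(1, min(m, vcpus) + 1)
        return [VirtCPUTopology(s, c, t)
                for s, c, t in product(limit(max_sockets),
                                       limit(max_cores),
                                       limit(max_threads))
                if s * c * t == vcpus]

    # For the 1-vCPU m1.nano flavor this yields the single topology the
    # log reports: [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_topologies(1))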
[ 1660.678108] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d1591ae-f57f-49d6-aba3-d73a9857c950 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server [None req-1ba4a820-c0b8-41c6-ae64-122b23d48d5b tempest-DeleteServersAdminTestJSON-2001479597 tempest-DeleteServersAdminTestJSON-2001479597-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-98a78696-5729-4039-b185-141423d29650']
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server raise self.value
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server raise self.value
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server raise self.value
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance
[ 1660.682410] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server raise self.value
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server raise self.value
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1660.684184] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1660.686876] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1660.686876] env[67008]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1660.686876] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1660.686876] env[67008]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1660.686876] env[67008]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1660.686876] env[67008]: ERROR oslo_messaging.rpc.server
[ 1660.687374] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5d34c1-5a1d-4593-bfde-dc9547a98c56 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1660.719230] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af4a9e1-fcd5-43ae-83a7-575e8b9645b2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1660.726862] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940b5adc-3801-4056-ac54-a3b7885a4b17 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1660.740592] env[67008]: DEBUG nova.compute.provider_tree [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1660.748482] env[67008]: DEBUG nova.scheduler.client.report [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1660.786519] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.341s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 1660.786671] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Start building networks asynchronously for instance.
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1660.820379] env[67008]: DEBUG nova.compute.utils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1660.821635] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1660.821803] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1660.830535] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1660.890934] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Successfully created port: c78f84fc-d8bf-404e-bab9-73d74082bc25 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1660.894500] env[67008]: DEBUG nova.policy [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '446098f8cb314cb8a999cce83b377778', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73bcd75224ba46ce808f5b052305728c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1660.915965] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1660.950875] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1660.951147] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1660.951322] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1660.951617] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1660.951680] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1660.951782] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1660.951983] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1660.952303] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1660.952462] env[67008]: DEBUG 
nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1660.952640] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1660.953182] env[67008]: DEBUG nova.virt.hardware [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1660.954013] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c96eb9-f580-46e2-ac4f-73700da9ff1f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.962533] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d0617e-fdaf-4c4e-b409-f0244b7c117b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.308773] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Successfully created port: 2b9dc228-6928-4c6a-a38b-00f7d2c54296 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1661.514104] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Successfully updated port: c78f84fc-d8bf-404e-bab9-73d74082bc25 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1661.528353] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "refresh_cache-6ede77a1-aa76-4e9f-8beb-80131e7990da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1661.528505] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "refresh_cache-6ede77a1-aa76-4e9f-8beb-80131e7990da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1661.528656] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1661.556296] env[67008]: DEBUG nova.compute.manager [req-65394c03-cf1b-4aca-a499-d3b9d882c094 req-c39236f7-33f8-40e2-b568-1fc50222f3ae service nova] 
[instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Received event network-vif-plugged-c78f84fc-d8bf-404e-bab9-73d74082bc25 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1661.556518] env[67008]: DEBUG oslo_concurrency.lockutils [req-65394c03-cf1b-4aca-a499-d3b9d882c094 req-c39236f7-33f8-40e2-b568-1fc50222f3ae service nova] Acquiring lock "6ede77a1-aa76-4e9f-8beb-80131e7990da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1661.556807] env[67008]: DEBUG oslo_concurrency.lockutils [req-65394c03-cf1b-4aca-a499-d3b9d882c094 req-c39236f7-33f8-40e2-b568-1fc50222f3ae service nova] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1661.556959] env[67008]: DEBUG oslo_concurrency.lockutils [req-65394c03-cf1b-4aca-a499-d3b9d882c094 req-c39236f7-33f8-40e2-b568-1fc50222f3ae service nova] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1661.557049] env[67008]: DEBUG nova.compute.manager [req-65394c03-cf1b-4aca-a499-d3b9d882c094 req-c39236f7-33f8-40e2-b568-1fc50222f3ae service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] No waiting events found dispatching network-vif-plugged-c78f84fc-d8bf-404e-bab9-73d74082bc25 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1661.557223] env[67008]: WARNING nova.compute.manager [req-65394c03-cf1b-4aca-a499-d3b9d882c094 req-c39236f7-33f8-40e2-b568-1fc50222f3ae service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Received unexpected event network-vif-plugged-c78f84fc-d8bf-404e-bab9-73d74082bc25 for instance with vm_state building and task_state spawning. [ 1661.579980] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1661.750033] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Updating instance_info_cache with network_info: [{"id": "c78f84fc-d8bf-404e-bab9-73d74082bc25", "address": "fa:16:3e:73:fe:be", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc78f84fc-d8", "ovs_interfaceid": "c78f84fc-d8bf-404e-bab9-73d74082bc25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.763190] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "refresh_cache-6ede77a1-aa76-4e9f-8beb-80131e7990da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1661.763505] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance network_info: |[{"id": "c78f84fc-d8bf-404e-bab9-73d74082bc25", "address": "fa:16:3e:73:fe:be", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc78f84fc-d8", "ovs_interfaceid": "c78f84fc-d8bf-404e-bab9-73d74082bc25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1661.763893] env[67008]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:fe:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c78f84fc-d8bf-404e-bab9-73d74082bc25', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1661.771403] env[67008]: DEBUG oslo.service.loopingcall [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1661.771877] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1661.772116] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc7a9ec3-c495-4ebe-a97d-db96d16daa32 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.796146] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1661.796146] env[67008]: value = "task-2824985" [ 1661.796146] env[67008]: _type = "Task" [ 1661.796146] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.804565] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824985, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.031205] env[67008]: DEBUG nova.compute.manager [req-31eae4b6-35c4-4f3c-94b3-9fe1246ae657 req-8bf413ac-d020-46c2-b8d2-2e9421e4c36a service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Received event network-vif-plugged-2b9dc228-6928-4c6a-a38b-00f7d2c54296 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1662.031205] env[67008]: DEBUG oslo_concurrency.lockutils [req-31eae4b6-35c4-4f3c-94b3-9fe1246ae657 req-8bf413ac-d020-46c2-b8d2-2e9421e4c36a service nova] Acquiring lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1662.031205] env[67008]: DEBUG oslo_concurrency.lockutils [req-31eae4b6-35c4-4f3c-94b3-9fe1246ae657 req-8bf413ac-d020-46c2-b8d2-2e9421e4c36a service nova] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1662.031205] env[67008]: DEBUG oslo_concurrency.lockutils [req-31eae4b6-35c4-4f3c-94b3-9fe1246ae657 req-8bf413ac-d020-46c2-b8d2-2e9421e4c36a service nova] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1662.031205] env[67008]: DEBUG nova.compute.manager [req-31eae4b6-35c4-4f3c-94b3-9fe1246ae657 req-8bf413ac-d020-46c2-b8d2-2e9421e4c36a service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] No waiting events found dispatching network-vif-plugged-2b9dc228-6928-4c6a-a38b-00f7d2c54296 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1662.031962] env[67008]: WARNING nova.compute.manager [req-31eae4b6-35c4-4f3c-94b3-9fe1246ae657 req-8bf413ac-d020-46c2-b8d2-2e9421e4c36a service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Received unexpected event network-vif-plugged-2b9dc228-6928-4c6a-a38b-00f7d2c54296 for instance with vm_state building and task_state spawning. 
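The "No waiting events found" / "Received unexpected event" pair above is ordering noise rather than an error: Neutron delivered network-vif-plugged before the compute manager had registered a waiter for that event, so the pop found nothing and the event was logged and dropped. Below is a toy version of that register-then-pop mechanism, using threading.Event in place of Nova's eventlet-based primitives; the method names prepare/pop are invented for the sketch.

    import threading

    class InstanceEvents:
        """Registry of events a spawn is prepared to wait on; external
        events pop and signal them (cf. pop_instance_event in the log)."""

        def __init__(self):
            self._waiters = {}             # (instance_id, event_name) -> Event
            self._lock = threading.Lock()  # plays the role of the
                                           # '<instance>-events' lock above

        def prepare(self, instance_id, name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_id, name)] = ev
            return ev

        def pop(self, instance_id, name):
            with self._lock:
                return self._waiters.pop((instance_id, name), None)

    events = InstanceEvents()

    def handle_external_event(instance_id, name):
        ev = events.pop(instance_id, name)
        if ev is None:
            # Same situation as the WARNING above: nobody has registered
            # a waiter yet, so the event is reported and discarded.
            print(f"Received unexpected event {name} for {instance_id}")
        else:
            ev.set()

    handle_external_event("fbf3b5da", "network-vif-plugged-2b9dc228")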
[ 1662.109831] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Successfully updated port: 2b9dc228-6928-4c6a-a38b-00f7d2c54296 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1662.117205] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "refresh_cache-fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1662.117376] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired lock "refresh_cache-fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1662.117535] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1662.187641] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1662.306536] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824985, 'name': CreateVM_Task, 'duration_secs': 0.337923} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.306720] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1662.307692] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1662.307930] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1662.308333] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1662.308949] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c24da6f5-bfb7-4029-a4e1-b928d2539ff6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.313548] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1662.313548] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f0eb3c-42e0-d27e-03c9-6019db5a582f" [ 1662.313548] env[67008]: _type = "Task" [ 1662.313548] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.321159] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f0eb3c-42e0-d27e-03c9-6019db5a582f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.585742] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Updating instance_info_cache with network_info: [{"id": "2b9dc228-6928-4c6a-a38b-00f7d2c54296", "address": "fa:16:3e:c9:d9:b5", "network": {"id": "59eb95be-bced-401f-87ad-f8678fd917e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2123186140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73bcd75224ba46ce808f5b052305728c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9dc228-69", "ovs_interfaceid": "2b9dc228-6928-4c6a-a38b-00f7d2c54296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.595751] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Releasing lock "refresh_cache-fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1662.596285] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance network_info: |[{"id": "2b9dc228-6928-4c6a-a38b-00f7d2c54296", "address": "fa:16:3e:c9:d9:b5", "network": {"id": "59eb95be-bced-401f-87ad-f8678fd917e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2123186140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73bcd75224ba46ce808f5b052305728c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9dc228-69", "ovs_interfaceid": "2b9dc228-6928-4c6a-a38b-00f7d2c54296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1662.597982] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:d9:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '459b8c74-0aa6-42b6-996a-42b1c5d7e5c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b9dc228-6928-4c6a-a38b-00f7d2c54296', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1662.604946] env[67008]: DEBUG oslo.service.loopingcall [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1662.605477] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1662.605760] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bbe8fea-f1ee-4ae4-9a01-c0424182fc1f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.625949] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1662.625949] env[67008]: value = "task-2824986" [ 1662.625949] env[67008]: _type = "Task" [ 1662.625949] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.633826] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824986, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.826600] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1662.826936] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1662.827199] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1663.136223] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824986, 'name': CreateVM_Task, 'duration_secs': 0.287912} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.136499] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1663.137084] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1663.137271] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1663.137575] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1663.137826] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccd31dce-2018-45c6-b1eb-78ba600bc985 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.142081] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 1663.142081] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e94ede-23fb-08e6-c255-d47a76951521" [ 1663.142081] env[67008]: _type = "Task" [ 1663.142081] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.149236] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e94ede-23fb-08e6-c255-d47a76951521, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.589820] env[67008]: DEBUG nova.compute.manager [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Received event network-changed-c78f84fc-d8bf-404e-bab9-73d74082bc25 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1663.590085] env[67008]: DEBUG nova.compute.manager [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Refreshing instance network info cache due to event network-changed-c78f84fc-d8bf-404e-bab9-73d74082bc25. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1663.590247] env[67008]: DEBUG oslo_concurrency.lockutils [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] Acquiring lock "refresh_cache-6ede77a1-aa76-4e9f-8beb-80131e7990da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1663.590391] env[67008]: DEBUG oslo_concurrency.lockutils [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] Acquired lock "refresh_cache-6ede77a1-aa76-4e9f-8beb-80131e7990da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1663.590551] env[67008]: DEBUG nova.network.neutron [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Refreshing network info cache for port c78f84fc-d8bf-404e-bab9-73d74082bc25 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1663.655401] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1663.655639] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1663.655846] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1663.933515] env[67008]: DEBUG nova.network.neutron [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Updated VIF entry in instance network info cache for port c78f84fc-d8bf-404e-bab9-73d74082bc25. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1663.933973] env[67008]: DEBUG nova.network.neutron [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Updating instance_info_cache with network_info: [{"id": "c78f84fc-d8bf-404e-bab9-73d74082bc25", "address": "fa:16:3e:73:fe:be", "network": {"id": "d2698162-b696-472c-a947-914a912bdb74", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1314793207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2ffa090d43c4facaec9fcb96575a5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc78f84fc-d8", "ovs_interfaceid": "c78f84fc-d8bf-404e-bab9-73d74082bc25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.945476] env[67008]: DEBUG oslo_concurrency.lockutils [req-12bd76c8-5bff-4c8c-a7ea-bf843b38f965 req-037524cf-84d4-42e7-8657-b6a8d79ac73b service nova] Releasing lock "refresh_cache-6ede77a1-aa76-4e9f-8beb-80131e7990da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1664.058349] env[67008]: DEBUG nova.compute.manager [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Received event network-changed-2b9dc228-6928-4c6a-a38b-00f7d2c54296 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1664.058349] env[67008]: DEBUG nova.compute.manager [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Refreshing instance network info cache due to event network-changed-2b9dc228-6928-4c6a-a38b-00f7d2c54296. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1664.058349] env[67008]: DEBUG oslo_concurrency.lockutils [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] Acquiring lock "refresh_cache-fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1664.058349] env[67008]: DEBUG oslo_concurrency.lockutils [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] Acquired lock "refresh_cache-fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1664.058349] env[67008]: DEBUG nova.network.neutron [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Refreshing network info cache for port 2b9dc228-6928-4c6a-a38b-00f7d2c54296 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1664.360179] env[67008]: DEBUG nova.network.neutron [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Updated VIF entry in instance network info cache for port 2b9dc228-6928-4c6a-a38b-00f7d2c54296. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1664.360547] env[67008]: DEBUG nova.network.neutron [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Updating instance_info_cache with network_info: [{"id": "2b9dc228-6928-4c6a-a38b-00f7d2c54296", "address": "fa:16:3e:c9:d9:b5", "network": {"id": "59eb95be-bced-401f-87ad-f8678fd917e5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2123186140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73bcd75224ba46ce808f5b052305728c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "459b8c74-0aa6-42b6-996a-42b1c5d7e5c6", "external-id": "nsx-vlan-transportzone-467", "segmentation_id": 467, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b9dc228-69", "ovs_interfaceid": "2b9dc228-6928-4c6a-a38b-00f7d2c54296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.370050] env[67008]: DEBUG oslo_concurrency.lockutils [req-1ac91b09-829f-4385-b2c2-49fd948756eb req-c5bbe179-d18f-479e-9acf-bff973b4e3a5 service nova] Releasing lock "refresh_cache-fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1672.832866] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock 
"6ede77a1-aa76-4e9f-8beb-80131e7990da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1683.859499] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1684.857346] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1684.869195] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1684.869561] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1684.869561] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1684.869724] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1684.870864] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7007a499-b487-4f84-be9a-cebe4fff52da {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.880524] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a9281c-82f6-4e6d-960b-02ebf8a7c412 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.895540] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4690d3-4f19-4da6-9761-db36de721fb6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.902403] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f38826-f50e-42e1-92f0-d1c1ba79af69 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.934902] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181071MB free_disk=171GB free_vcpus=48 pci_devices=None 
{{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1684.935127] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1684.935283] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1685.022057] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022235] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022367] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022493] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022617] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022732] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022846] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.022959] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.023083] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.023283] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1685.037402] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1685.049821] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1685.050087] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1685.050242] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1685.222820] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36d7c08-ed1e-417f-ac6a-6a9262e8b252 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.230536] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa70c25f-0ffa-42ca-9d74-3ead7f8bc69f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.260278] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4202a1df-545d-4a77-9b67-aa66776cf292 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.267650] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc081d4-a414-47ea-92d6-b8b1eaea2588 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.282156] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.291223] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1685.307476] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1685.307703] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.372s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1689.307355] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.852206] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.855872] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.856090] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.856243] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1690.856588] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1690.856880] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1690.856880] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1690.875989] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876167] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876304] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876428] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876553] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876675] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876795] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.876913] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.877040] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.877160] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1690.877276] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1691.856794] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.857173] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1706.438018] env[67008]: WARNING oslo_vmware.rw_handles [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1706.438018] env[67008]: ERROR oslo_vmware.rw_handles [ 1706.438778] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1706.440507] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1706.440774] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Copying Virtual Disk [datastore1] vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/ca0d20a7-f260-49ee-a3db-899ead4d5505/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1706.441108] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5af0ff5-4513-489b-bf38-ff07b5c1fcba {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.448846] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1706.448846] env[67008]: value = "task-2824987" [ 1706.448846] env[67008]: _type = "Task" [ 1706.448846] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.456682] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824987, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.960111] env[67008]: DEBUG oslo_vmware.exceptions [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1706.960409] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1706.960991] env[67008]: ERROR nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1706.960991] env[67008]: Faults: ['InvalidArgument'] [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Traceback (most recent call last): [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] yield resources [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self.driver.spawn(context, instance, image_meta, [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self._fetch_image_if_missing(context, vi) [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] image_cache(vi, tmp_image_ds_loc) [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] vm_util.copy_virtual_disk( [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] session._wait_for_task(vmdk_copy_task) [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] return self.wait_for_task(task_ref) [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] return evt.wait() [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] result = hub.switch() [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] return self.greenlet.switch() [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self.f(*self.args, **self.kw) [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] raise exceptions.translate_fault(task_info.error) [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1706.960991] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Faults: ['InvalidArgument'] [ 1706.960991] 
env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] [ 1706.961905] env[67008]: INFO nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Terminating instance [ 1706.962883] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1706.963115] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1706.963360] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33eb44f1-bb8f-447a-ae87-43ebc1b07665 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.965594] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1706.965789] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1706.966515] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1060fc24-b84f-4487-a109-89741363f7d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.973562] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1706.974571] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19487a14-ebf4-42fc-a667-b8eb43bcd0d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.975917] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1706.976099] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1706.976764] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96e5d1b5-8166-4ad7-991e-06a66ad4e02c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.981353] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for the task: (returnval){ [ 1706.981353] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52dac7e8-c798-43ea-f10d-86e88f16ad53" [ 1706.981353] env[67008]: _type = "Task" [ 1706.981353] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.988518] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52dac7e8-c798-43ea-f10d-86e88f16ad53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.040881] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1707.041137] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1707.041322] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleting the datastore file [datastore1] 042421f3-9b91-4fb2-bc3c-0d97e93ad78e {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1707.041621] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cd77301-fb66-4651-9d28-752daf3b85d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.048097] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 1707.048097] env[67008]: value = "task-2824989" [ 1707.048097] env[67008]: _type = "Task" [ 1707.048097] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.055160] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824989, 'name': DeleteDatastoreFile_Task} progress is 0%. 
[ 1707.491788] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1707.492121] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Creating directory with path [datastore1] vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1707.492230] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1811f5de-c31f-41d9-b16d-a51a0e94c5a9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.502814] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Created directory with path [datastore1] vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1707.503007] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Fetch image to [datastore1] vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1707.503192] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1707.503914] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da697da-0ddc-4dbd-8cec-7d3819000bc0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.510124] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b263724-7a63-40db-a229-526481f1d558 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.519111] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4efab3-24d1-4f9d-9cec-2b683294d036 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.553088] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c213340-da1d-4a09-abed-3544e1a01bd4 {{(pid=67008) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.560974] env[67008]: DEBUG oslo_vmware.api [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2824989, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077071} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.561518] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1707.561716] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1707.561886] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1707.562069] env[67008]: INFO nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1707.563593] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9f1773a1-271e-41e0-85fe-b0582cf63ea6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.565408] env[67008]: DEBUG nova.compute.claims [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1707.565607] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1707.565895] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1707.586681] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1707.639885] env[67008]: DEBUG oslo_vmware.rw_handles [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1707.700084] env[67008]: DEBUG oslo_vmware.rw_handles [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1707.700084] env[67008]: DEBUG oslo_vmware.rw_handles [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1707.814268] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d86696-59bd-4589-ba55-c18a0b0aaa07 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.822369] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940d55f1-9a72-446d-8022-7cca33c406e7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.853687] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4836c3-a180-42da-aa10-db86990808ec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.861160] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4d461e-e14f-403e-8572-a557588344e4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.873977] env[67008]: DEBUG nova.compute.provider_tree [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1707.882990] env[67008]: DEBUG nova.scheduler.client.report [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1707.895934] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1707.896469] env[67008]: ERROR nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1707.896469] env[67008]: Faults: ['InvalidArgument'] [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Traceback (most recent call last): [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self.driver.spawn(context, instance, image_meta, [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self._fetch_image_if_missing(context, vi) [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] image_cache(vi, tmp_image_ds_loc) [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] vm_util.copy_virtual_disk( [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] session._wait_for_task(vmdk_copy_task) [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] return self.wait_for_task(task_ref) [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] return evt.wait() [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] result = hub.switch() [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] return self.greenlet.switch() [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] self.f(*self.args, **self.kw) [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] raise exceptions.translate_fault(task_info.error) [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Faults: ['InvalidArgument'] [ 1707.896469] env[67008]: ERROR nova.compute.manager [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] [ 1707.897435] env[67008]: DEBUG nova.compute.utils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1707.898527] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Build of instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e was re-scheduled: A specified parameter was not correct: fileType [ 1707.898527] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1707.898898] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1707.899086] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
[ 1707.899260] env[67008]: DEBUG nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1707.899421] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1708.163813] env[67008]: DEBUG nova.network.neutron [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.175935] env[67008]: INFO nova.compute.manager [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Took 0.28 seconds to deallocate network for instance. [ 1708.270137] env[67008]: INFO nova.scheduler.client.report [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted allocations for instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e [ 1708.297397] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d456f6fa-a3c3-4a18-bcd0-eab5c5e5a484 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 623.857s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1708.298947] env[67008]: DEBUG oslo_concurrency.lockutils [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 427.350s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1708.298947] env[67008]: DEBUG oslo_concurrency.lockutils [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1708.299143] env[67008]: DEBUG oslo_concurrency.lockutils [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
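The "waited 427.350s" and "held 623.857s" figures in the lockutils lines above come from timing the acquire and release of a named in-process lock: the terminate request queued behind the (stuck, then rescheduled) build holding the per-instance lock. A stdlib-only sketch in the spirit of oslo.concurrency's lockutils, not its actual implementation:

```python
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name: str):
    """Acquire the named lock, logging how long we waited and held it."""
    with _registry_guard:  # one Lock object per name, created on demand
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')
```

Usage mirrors the log: `with timed_lock("042421f3-9b91-4fb2-bc3c-0d97e93ad78e"): ...` around the build, and a second caller blocking in the same context manager until release.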
[ 1708.299211] env[67008]: DEBUG oslo_concurrency.lockutils [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1708.301392] env[67008]: INFO nova.compute.manager [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Terminating instance [ 1708.303390] env[67008]: DEBUG nova.compute.manager [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1708.303632] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1708.304290] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b59e7a8e-4e49-4c9b-8f18-aacc3d159655 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.314947] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cb8898-9add-4848-9ecd-4a375012f8f6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.325930] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1708.348487] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 042421f3-9b91-4fb2-bc3c-0d97e93ad78e could not be found. [ 1708.348738] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1708.348931] env[67008]: INFO nova.compute.manager [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Took 0.05 seconds to destroy the instance on the hypervisor.
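This second terminate finds the VM already gone on the backend (InstanceNotFound, since the earlier cleanup unregistered it) and still reports "Instance destroyed": destroy is deliberately idempotent so cleanup can run more than once without error. A minimal sketch of that tolerance, with illustrative stand-in callables rather than the vmwareapi driver's real entry points:

```python
class InstanceNotFound(Exception):
    """The backend has no VM for this instance UUID."""


def destroy_instance(uuid, unregister_vm, delete_datastore_files):
    """Tear down an instance; treat 'already gone' as success."""
    try:
        unregister_vm(uuid)
        delete_datastore_files(uuid)
    except InstanceNotFound:
        # Already removed by a previous cleanup pass; deleting twice
        # must not fail, or retries and races would wedge the instance.
        print(f"Instance {uuid} does not exist on backend; continuing")
    print(f"Instance {uuid} destroyed")
```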
[ 1708.349143] env[67008]: DEBUG oslo.service.loopingcall [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1708.349385] env[67008]: DEBUG nova.compute.manager [-] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1708.349486] env[67008]: DEBUG nova.network.neutron [-] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1708.374195] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1708.374514] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1708.375923] env[67008]: INFO nova.compute.claims [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1708.388768] env[67008]: DEBUG nova.network.neutron [-] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.405957] env[67008]: INFO nova.compute.manager [-] [instance: 042421f3-9b91-4fb2-bc3c-0d97e93ad78e] Took 0.06 seconds to deallocate network for instance.
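The claim succeeds against the inventory reported for provider ad100a41-192a-4a03-bdd9-0a78ce856705 in the surrounding "Inventory has not changed" lines. Placement treats (total - reserved) * allocation_ratio as the schedulable capacity per resource class; a worked example with the numbers from this log:

```python
# Inventory as reported by the scheduler report client in this log.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    # Placement admits allocations while used + requested stays within this.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable units")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400. Each m1.nano claim
# (1 VCPU / 128 MB / 1 GB) is tiny against that, hence "Claim successful".
```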
[ 1708.507887] env[67008]: DEBUG oslo_concurrency.lockutils [None req-cecb1080-b380-4ca6-8173-446191c49307 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "042421f3-9b91-4fb2-bc3c-0d97e93ad78e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.209s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1708.580855] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950e33a2-5b8d-4d7b-89c3-192ef9d69734 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.589126] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0c19bb-ae0f-4ba5-a122-1be9c017c150 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.620660] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc075db9-3b0b-452e-aad9-994d92a608f4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.628771] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d27c6c-ecb2-4c19-9a83-d937f54cea98 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.642346] env[67008]: DEBUG nova.compute.provider_tree [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.651252] env[67008]: DEBUG nova.scheduler.client.report [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1708.666299] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1708.666761] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Start building networks asynchronously for instance.
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1708.704529] env[67008]: DEBUG nova.compute.utils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1708.705829] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1708.706010] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1708.715194] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1708.774391] env[67008]: DEBUG nova.policy [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97f4af320914c1d9256c28327a4a1c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff81b1d640c340dca8bcf28c7c3a2f01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1708.781476] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1708.807993] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=<?>,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T22:09:13Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1708.808246] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1708.808402] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1708.808605] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1708.808964] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1708.808964] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1708.809150] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1708.809311] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1708.809477] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130
tempest-ServersTestJSON-301886130-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1708.809637] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1708.810079] env[67008]: DEBUG nova.virt.hardware [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1708.810695] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2ec6ef-be08-4bff-b3bb-c75c0290b7be {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.819661] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e8f7ad-ba51-4775-83c0-dde52d254f94 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.160497] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Successfully created port: 71ca7fc2-b706-4d5b-bf6c-da054ac2774f {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1709.864038] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Successfully updated port: 71ca7fc2-b706-4d5b-bf6c-da054ac2774f {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1709.874164] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "refresh_cache-260ddcc7-b12c-46f9-9c98-df270b438cd2" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1709.874312] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired lock "refresh_cache-260ddcc7-b12c-46f9-9c98-df270b438cd2" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1709.874461] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1709.909833] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1710.340303] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Updating instance_info_cache with network_info: [{"id": "71ca7fc2-b706-4d5b-bf6c-da054ac2774f", "address": "fa:16:3e:b7:ff:75", "network": {"id": "542bddd3-3fe8-429e-8c06-3a88230d19ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-298851449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff81b1d640c340dca8bcf28c7c3a2f01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71ca7fc2-b7", "ovs_interfaceid": "71ca7fc2-b706-4d5b-bf6c-da054ac2774f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.347677] env[67008]: DEBUG nova.compute.manager [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Received event network-vif-plugged-71ca7fc2-b706-4d5b-bf6c-da054ac2774f {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1710.347850] env[67008]: DEBUG oslo_concurrency.lockutils [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] Acquiring lock "260ddcc7-b12c-46f9-9c98-df270b438cd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1710.348072] env[67008]: DEBUG oslo_concurrency.lockutils [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1710.348222] env[67008]: DEBUG oslo_concurrency.lockutils [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1710.348401] env[67008]: DEBUG nova.compute.manager [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] No waiting events found dispatching network-vif-plugged-71ca7fc2-b706-4d5b-bf6c-da054ac2774f {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
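"No waiting events found dispatching network-vif-plugged-..." means Neutron reported the event before anything on the compute side had registered to wait for it, so it is dropped with the warning that follows. A stdlib-only sketch of that register-then-pop bookkeeping, not Nova's InstanceEvents implementation:

```python
import threading


class InstanceEvents:
    """Track per-(instance, event) waiters for externally reported events."""

    def __init__(self):
        self._events: dict[tuple[str, str], threading.Event] = {}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid: str, name: str) -> threading.Event:
        """Register interest in an event *before* triggering the operation."""
        ev = threading.Event()
        with self._lock:
            self._events[(instance_uuid, name)] = ev
        return ev

    def pop(self, instance_uuid: str, name: str) -> None:
        """Deliver an externally reported event to its waiter, if any."""
        with self._lock:
            ev = self._events.pop((instance_uuid, name), None)
        if ev is None:
            print(f"No waiting events found dispatching {name}")
        else:
            ev.set()  # wake the thread blocked in ev.wait()
```

In this log the build simply had not yet reached the point of waiting on the plug event, so the early delivery is harmless.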
[ 1710.348566] env[67008]: WARNING nova.compute.manager [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Received unexpected event network-vif-plugged-71ca7fc2-b706-4d5b-bf6c-da054ac2774f for instance with vm_state building and task_state spawning. [ 1710.348743] env[67008]: DEBUG nova.compute.manager [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Received event network-changed-71ca7fc2-b706-4d5b-bf6c-da054ac2774f {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1710.348896] env[67008]: DEBUG nova.compute.manager [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Refreshing instance network info cache due to event network-changed-71ca7fc2-b706-4d5b-bf6c-da054ac2774f. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1710.349565] env[67008]: DEBUG oslo_concurrency.lockutils [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] Acquiring lock "refresh_cache-260ddcc7-b12c-46f9-9c98-df270b438cd2" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1710.352758] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Releasing lock "refresh_cache-260ddcc7-b12c-46f9-9c98-df270b438cd2" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1710.353036] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance network_info: |[{"id": "71ca7fc2-b706-4d5b-bf6c-da054ac2774f", "address": "fa:16:3e:b7:ff:75", "network": {"id": "542bddd3-3fe8-429e-8c06-3a88230d19ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-298851449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff81b1d640c340dca8bcf28c7c3a2f01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71ca7fc2-b7", "ovs_interfaceid": "71ca7fc2-b706-4d5b-bf6c-da054ac2774f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1710.353523] env[67008]: DEBUG oslo_concurrency.lockutils [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] Acquired lock
"refresh_cache-260ddcc7-b12c-46f9-9c98-df270b438cd2" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1710.353523] env[67008]: DEBUG nova.network.neutron [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Refreshing network info cache for port 71ca7fc2-b706-4d5b-bf6c-da054ac2774f {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1710.354447] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:ff:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71ca7fc2-b706-4d5b-bf6c-da054ac2774f', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1710.362321] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating folder: Project (ff81b1d640c340dca8bcf28c7c3a2f01). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1710.365250] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd2155e5-c1c9-4c49-9a5f-4ba9aa2e3610 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.378217] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Created folder: Project (ff81b1d640c340dca8bcf28c7c3a2f01) in parent group-v567993. [ 1710.378396] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating folder: Instances. Parent ref: group-v568083. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1710.378609] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a2e0049-7ee1-43e0-a949-27ffefc9fd93 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.387168] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Created folder: Instances in parent group-v568083. [ 1710.387386] env[67008]: DEBUG oslo.service.loopingcall [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1710.387555] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1710.387742] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f4ce62e-aa30-4147-b6b8-59e39aa00e1f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.407722] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1710.407722] env[67008]: value = "task-2824992" [ 1710.407722] env[67008]: _type = "Task" [ 1710.407722] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.414717] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824992, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.634375] env[67008]: DEBUG nova.network.neutron [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Updated VIF entry in instance network info cache for port 71ca7fc2-b706-4d5b-bf6c-da054ac2774f. {{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1710.634817] env[67008]: DEBUG nova.network.neutron [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Updating instance_info_cache with network_info: [{"id": "71ca7fc2-b706-4d5b-bf6c-da054ac2774f", "address": "fa:16:3e:b7:ff:75", "network": {"id": "542bddd3-3fe8-429e-8c06-3a88230d19ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-298851449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff81b1d640c340dca8bcf28c7c3a2f01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71ca7fc2-b7", "ovs_interfaceid": "71ca7fc2-b706-4d5b-bf6c-da054ac2774f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.645283] env[67008]: DEBUG oslo_concurrency.lockutils [req-ef09c1b2-0356-41ff-b103-478967d358a6 req-f2f273f8-e5c7-42f3-9642-e13238fc9bca service nova] Releasing lock "refresh_cache-260ddcc7-b12c-46f9-9c98-df270b438cd2" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1710.917798] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824992, 'name': CreateVM_Task, 'duration_secs': 0.307071} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.918116] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1710.918645] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1710.918821] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1710.919167] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1710.919412] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-120526a8-2757-4348-bdb8-bb413ce0eac5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.923618] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for the task: (returnval){ [ 1710.923618] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5246ff65-2197-4dcf-a948-02c4960e769f" [ 1710.923618] env[67008]: _type = "Task" [ 1710.923618] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.930534] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5246ff65-2197-4dcf-a948-02c4960e769f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.433786] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1711.434042] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1711.434259] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1739.857333] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1743.864737] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1746.858221] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1746.869547] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1746.869765] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1746.869932] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1746.870099] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1746.871493] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7acd94a-e93e-454a-837d-e4152c28181c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.879999] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f577fe9-c422-448e-9eee-dcd36de45d0d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.894214] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f87d8b9-4c4f-4227-974f-d9afdf368bc1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.900129] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd88024d-cae0-4a0b-bcf8-164d95f09bbb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.928352] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181071MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1746.928483] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1746.928661] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.065019] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1747.078189] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
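[annotation] Each of the ten actively managed instances listed above holds an identical placement allocation, and the totals the tracker reports just below (used_vcpus=10, used_disk=10GB, used_ram=1792MB) follow directly from that plus the 512 MB memory reservation in the provider inventory. A back-of-the-envelope check, using only figures visible in this log:

    # Sanity-checking the resource audit against the per-instance allocations.
    alloc = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}   # per-instance allocation
    instances = 10                                        # ten actively managed VMs

    used_vcpus = instances * alloc['VCPU']                # 10, matches "used_vcpus=10"
    used_disk_gb = instances * alloc['DISK_GB']           # 10, matches "used_disk=10GB"
    reserved_mb = 512                                     # MEMORY_MB 'reserved' in inventory
    used_ram_mb = reserved_mb + instances * alloc['MEMORY_MB']  # 512 + 1280 = 1792

    print(used_vcpus, used_disk_gb, used_ram_mb)          # 10 10 1792, as reported below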
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1747.078189] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1747.078189] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1747.214144] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1dee50-68d1-45f3-9c9e-1cc2d6bb5eb0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.221926] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b0fa0d-ebf0-4524-b73c-583db171c54b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.251483] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcdc0d2b-359e-4e9b-b22a-ab0f74c268d7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.258098] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1ccb4b-8e25-4df0-b0e4-7d28f4d46811 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.270429] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.279670] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1747.293144] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1747.293326] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.365s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1750.291747] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.851569] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.856248] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.856405] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1751.857683] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.777610] env[67008]: WARNING oslo_vmware.rw_handles [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1752.777610] env[67008]: ERROR oslo_vmware.rw_handles [ 1752.778123] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1752.780241] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 
tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1752.780492] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Copying Virtual Disk [datastore1] vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/27ac6eb9-a39b-43e6-90c1-602bf349143d/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1752.780791] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a1d9f9d-578e-42d6-9046-397c3068b8e5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.790119] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for the task: (returnval){ [ 1752.790119] env[67008]: value = "task-2824993" [ 1752.790119] env[67008]: _type = "Task" [ 1752.790119] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.797667] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Task: {'id': task-2824993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.856399] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1752.856586] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1752.856703] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1752.876797] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877250] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877250] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877519] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877642] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877763] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877891] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.877996] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.878143] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1752.878263] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1753.302039] env[67008]: DEBUG oslo_vmware.exceptions [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Fault InvalidArgument not matched. 
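[annotation] The "Fault InvalidArgument not matched" debug line means oslo.vmware found no specific exception class registered for this fault name and fell back to the generic VimFaultException, which is why the spawn failure below carries Faults: ['InvalidArgument']. A rough sketch of that lookup, with a hypothetical registry standing in for the real one:

    # Hedged sketch of the fault-name -> exception-class fallback suggested by
    # "Fault InvalidArgument not matched."; the registry here is illustrative.
    class VimFaultException(Exception):
        """Generic catch-all, in the spirit of oslo_vmware.exceptions."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    _FAULT_CLASSES = {}   # a real registry maps names like 'FileNotFound' to subclasses

    def get_fault_class(name):
        cls = _FAULT_CLASSES.get(name)
        if cls is None:
            # The branch logged above: no specific class, use the generic one.
            cls = VimFaultException
        return cls

    exc = get_fault_class('InvalidArgument')(
        ['InvalidArgument'], 'A specified parameter was not correct: fileType')
    print(type(exc).__name__)   # VimFaultException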
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1753.302039] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1753.302593] env[67008]: ERROR nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1753.302593] env[67008]: Faults: ['InvalidArgument'] [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Traceback (most recent call last): [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] yield resources [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self.driver.spawn(context, instance, image_meta, [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._fetch_image_if_missing(context, vi) [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] image_cache(vi, tmp_image_ds_loc) [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] vm_util.copy_virtual_disk( [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] session._wait_for_task(vmdk_copy_task) [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.wait_for_task(task_ref) [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return evt.wait() [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] result = hub.switch() [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.greenlet.switch() [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self.f(*self.args, **self.kw) [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] raise exceptions.translate_fault(task_info.error) [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Faults: ['InvalidArgument'] [ 1753.302593] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] [ 1753.303928] env[67008]: INFO nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Terminating instance [ 1753.304445] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1753.304650] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1753.304911] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5714e224-17f5-4079-969c-672d458a797f {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.306979] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1753.307154] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquired lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1753.307320] env[67008]: DEBUG nova.network.neutron [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1753.314733] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1753.314933] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1753.316119] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-834d38db-7875-4398-9df4-750937393a37 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.323495] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 1753.323495] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d09e47-5bd6-813f-c087-7dd81bee69f4" [ 1753.323495] env[67008]: _type = "Task" [ 1753.323495] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.330861] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d09e47-5bd6-813f-c087-7dd81bee69f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.341347] env[67008]: DEBUG nova.network.neutron [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1753.420013] env[67008]: DEBUG nova.network.neutron [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.428793] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Releasing lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1753.429240] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1753.429432] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1753.430553] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422e1d06-8e2c-4d44-b443-a25b78e51a17 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.438200] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1753.438418] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d4f39e9-050c-4685-88b7-d41d12a0c310 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.467642] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1753.467864] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1753.468078] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Deleting the datastore file [datastore1] df16a3f6-cf19-4baf-9cc2-4819481f5eaf {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1753.468322] env[67008]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf47b42e-2cf5-4a75-97a7-ac0806782a7a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.476425] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for the task: (returnval){ [ 1753.476425] env[67008]: value = "task-2824995" [ 1753.476425] env[67008]: _type = "Task" [ 1753.476425] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.484552] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Task: {'id': task-2824995, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.833953] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1753.834253] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating directory with path [datastore1] vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1753.834454] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ee3e582-534e-4d85-9e4a-2b474b670679 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.845499] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Created directory with path [datastore1] vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1753.845678] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Fetch image to [datastore1] vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1753.845847] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 
1753.846550] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c9d154-c682-4bff-a510-830ced1a5888 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.852690] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c43e3a9-cdac-4b96-8b30-ae48114657ec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.856366] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.861606] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6d0b46-c017-4e7b-9d4c-38cee17f047e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.892736] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254104be-abd1-46a7-9df4-01a9581608a1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.898264] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e7835189-8ea5-4a28-a8e9-5d2a31b5d1e9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.918778] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1753.968250] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1754.027150] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1754.027343] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
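[annotation] The write handle above streams the ~21 MB sparse VMDK to the ESX host's /folder datastore endpoint over HTTPS, authenticated via the SessionManager.AcquireGenericServiceTicket call a few lines earlier; the close() step is also where the earlier RemoteDisconnected warning surfaced, when the server dropped the socket before answering. A minimal sketch of such an upload with http.client; host, path, and cookie are placeholders, not values from this log:

    # Hedged sketch of the datastore file upload performed by
    # oslo_vmware.rw_handles: a streamed PUT to the ESX "/folder" endpoint.
    import http.client
    import ssl

    def upload_vmdk(host, ds_path, data_iter, size, cookie):
        ctx = ssl.create_default_context()
        conn = http.client.HTTPSConnection(host, 443, context=ctx)
        conn.putrequest('PUT', ds_path)            # /folder/...?dcPath=...&dsName=...
        conn.putheader('Content-Length', str(size))
        conn.putheader('Cookie', cookie)           # service-ticket auth placeholder
        conn.endheaders()
        for chunk in data_iter:                    # stream the image, piece by piece
            conn.send(chunk)
        resp = conn.getresponse()                  # this is the step where
        try:                                       # RemoteDisconnected was raised above
            return resp.status
        finally:
            conn.close()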
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1754.031094] env[67008]: DEBUG oslo_vmware.api [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Task: {'id': task-2824995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042097} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.031333] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.031519] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1754.031690] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1754.031887] env[67008]: INFO nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1754.032151] env[67008]: DEBUG oslo.service.loopingcall [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.032358] env[67008]: DEBUG nova.compute.manager [-] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network deallocation for instance since networking was not requested. 
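[annotation] The claim abort that follows, like every lockutils line in this log, reports how long the lock was waited on and held ("waited 0.000s", "held 0.253s"). A bare-bones approximation of that instrumented locking pattern, assuming a plain threading.Lock underneath; the real oslo_concurrency.lockutils adds fair and external (file-based) locks on top:

    # Simplified sketch of the 'acquired ... waited Ns' / '"released" ... held Ns'
    # bookkeeping visible throughout this log.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, caller):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: '
              f'waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    # usage sketch:
    # with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
    #     ...abort the claim...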
{{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1754.034516] env[67008]: DEBUG nova.compute.claims [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1754.034684] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1754.034898] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.210447] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac6b900-9de9-44be-9a55-eea7b097a330 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.219044] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f9ee72-7b71-4c61-9790-c3d976fdf5ab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.247158] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bdc6b1-46c9-4458-b687-5f7022d99d71 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.253928] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46b3603-e7f2-4028-a23e-8a6112adc283 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.266769] env[67008]: DEBUG nova.compute.provider_tree [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.274767] env[67008]: DEBUG nova.scheduler.client.report [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1754.288080] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 
tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.288615] env[67008]: ERROR nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1754.288615] env[67008]: Faults: ['InvalidArgument'] [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Traceback (most recent call last): [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self.driver.spawn(context, instance, image_meta, [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._fetch_image_if_missing(context, vi) [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] image_cache(vi, tmp_image_ds_loc) [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] vm_util.copy_virtual_disk( [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] session._wait_for_task(vmdk_copy_task) [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.wait_for_task(task_ref) [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return evt.wait() [ 1754.288615] env[67008]: ERROR 
nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] result = hub.switch() [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.greenlet.switch() [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self.f(*self.args, **self.kw) [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] raise exceptions.translate_fault(task_info.error) [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Faults: ['InvalidArgument'] [ 1754.288615] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] [ 1754.290276] env[67008]: DEBUG nova.compute.utils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1754.290849] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Build of instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf was re-scheduled: A specified parameter was not correct: fileType [ 1754.290849] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1754.291226] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1754.291416] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1754.291567] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquired lock 
"refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1754.291750] env[67008]: DEBUG nova.network.neutron [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1754.324393] env[67008]: DEBUG nova.network.neutron [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1754.451482] env[67008]: DEBUG nova.network.neutron [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.461642] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Releasing lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1754.461858] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1754.462478] env[67008]: DEBUG nova.compute.manager [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1754.549408] env[67008]: INFO nova.scheduler.client.report [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Deleted allocations for instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf [ 1754.571730] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0ed8e75c-f9cd-4ddc-8701-999c4fe30fb3 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 615.761s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.572808] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 419.677s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.573033] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1754.573237] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.573402] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.575551] env[67008]: INFO nova.compute.manager [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Terminating instance [ 1754.577099] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquiring lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1754.577259] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Acquired lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" 
{{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1754.577422] env[67008]: DEBUG nova.network.neutron [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1754.593360] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1754.601205] env[67008]: DEBUG nova.network.neutron [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1754.657671] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1754.657939] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1754.659404] env[67008]: INFO nova.compute.claims [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1754.662659] env[67008]: DEBUG nova.network.neutron [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.672372] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Releasing lock "refresh_cache-df16a3f6-cf19-4baf-9cc2-4819481f5eaf" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1754.672739] env[67008]: DEBUG nova.compute.manager [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1754.672945] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1754.673442] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f94497d1-6e5e-4f0d-80b1-610729319271 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.682576] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b41f6fa-9ae0-428b-8910-a3ad471e5358 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.710965] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance df16a3f6-cf19-4baf-9cc2-4819481f5eaf could not be found. [ 1754.711172] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1754.711346] env[67008]: INFO nova.compute.manager [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1754.711576] env[67008]: DEBUG oslo.service.loopingcall [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.714194] env[67008]: DEBUG nova.compute.manager [-] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1754.714296] env[67008]: DEBUG nova.network.neutron [-] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1754.816232] env[67008]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67008) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1754.816485] env[67008]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-324c4bd5-baa0-4d0b-9502-6fa772219db8'] [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall result = f(*args,
**kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1754.817031] env[67008]: ERROR oslo.service.loopingcall [ 1754.819025] env[67008]: ERROR nova.compute.manager [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1754.833659] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46874db8-1ca6-487a-b1df-bd7e09c07474 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.840796] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77c3438-06ce-4885-8057-69c6ea37e7c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.872051] env[67008]: ERROR nova.compute.manager [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Traceback (most recent call last): [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] ret = obj(*args, **kwargs) [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] exception_handler_v20(status_code, error_body) [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] raise client_exc(message=error_message, [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Neutron server returns request_ids: ['req-324c4bd5-baa0-4d0b-9502-6fa772219db8'] [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] During handling of the above exception, another exception occurred: [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Traceback (most recent call last): [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._delete_instance(context, instance, bdms) [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._shutdown_instance(context, instance, bdms) [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._try_deallocate_network(context, instance, requested_networks) [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] with excutils.save_and_reraise_exception(): [ 1754.872051] env[67008]: ERROR 
nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self.force_reraise() [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] raise self.value [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] _deallocate_network_with_retries() [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return evt.wait() [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] result = hub.switch() [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.greenlet.switch() [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] result = func(*self.args, **self.kw) [ 1754.872051] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] result = f(*args, **kwargs) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._deallocate_network( [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self.network_api.deallocate_for_instance( [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: 
df16a3f6-cf19-4baf-9cc2-4819481f5eaf] data = neutron.list_ports(**search_opts) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] ret = obj(*args, **kwargs) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.list('ports', self.ports_path, retrieve_all, [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] ret = obj(*args, **kwargs) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] for r in self._pagination(collection, path, **params): [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] res = self.get(path, params=params) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] ret = obj(*args, **kwargs) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.retry_request("GET", action, body=body, [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] ret = obj(*args, **kwargs) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] return self.do_request(method, action, body=body, [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] ret = obj(*args, **kwargs) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] self._handle_fault_response(status_code, replybody, resp) [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1754.873230] env[67008]: ERROR nova.compute.manager [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] [ 1754.874380] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1754.875501] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91483cc-0698-44f3-8764-225e287d30df {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.882467] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0481c7-c6f0-4c9d-9661-c50dc3db691a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.895921] env[67008]: DEBUG nova.compute.provider_tree [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.901426] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Lock "df16a3f6-cf19-4baf-9cc2-4819481f5eaf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.329s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.904278] env[67008]: DEBUG nova.scheduler.client.report [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1754.916155] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 
tempest-DeleteServersTestJSON-1133260470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1754.916596] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1754.946088] env[67008]: DEBUG nova.compute.utils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1754.948049] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1754.948049] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1754.950330] env[67008]: INFO nova.compute.manager [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] [instance: df16a3f6-cf19-4baf-9cc2-4819481f5eaf] Successfully reverted task state from None on failure for instance. [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server [None req-0402e798-bfb7-437d-823a-6e6b19972643 tempest-ServerShowV257Test-629395203 tempest-ServerShowV257Test-629395203-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
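
The traceback that follows repeats the failure at the RPC layer and makes the terminate path's shape visible: _try_deallocate_network runs the teardown inside an oslo_service looping call with retries, and when a non-retriable exception escapes, save_and_reraise_exception() lets the compute manager record the failure (vm_state ERROR, task state reverted) before re-raising into oslo_messaging. A sketch of that pattern, with assumed retry bounds and a hypothetical set_error_state helper (not Nova's actual values or code):

    from oslo_service import loopingcall
    from oslo_utils import excutils

    def try_deallocate_network(context, instance, requested_networks,
                               network_api, set_error_state):
        @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                    max_sleep_time=30,
                                    exceptions=(ConnectionError,))
        def _deallocate_network_with_retries():
            # Retried only for the listed transient exceptions; anything
            # else, like the auth failure above, escapes on the first try.
            network_api.deallocate_for_instance(
                context, instance, requested_networks)

        try:
            _deallocate_network_with_retries()
        except Exception:
            # Mirrors the save_and_reraise_exception frames in the
            # tracebacks: record the failure, then re-raise unchanged so
            # the RPC server logs it, as oslo_messaging.rpc.server does here.
            with excutils.save_and_reraise_exception():
                set_error_state(instance)
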
[ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-324c4bd5-baa0-4d0b-9502-6fa772219db8'] [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1754.953622] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.955543] env[67008]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1754.955543] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1754.957174] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1754.957174] env[67008]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1754.957174] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1754.957174] env[67008]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1754.957174] env[67008]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1754.957174] env[67008]: ERROR oslo_messaging.rpc.server [ 1754.957174] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Start building block device mappings for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1755.013695] env[67008]: DEBUG nova.policy [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d81494995714d4494ce972cdd9e681e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2daf47b1f7e423791c0b03878567e11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1755.016670] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1755.043931] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1755.044184] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1755.044339] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.044515] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1755.044658] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.044810] env[67008]: DEBUG nova.virt.hardware [None 
req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1755.045015] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1755.045178] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1755.045339] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1755.045497] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1755.045665] env[67008]: DEBUG nova.virt.hardware [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1755.046512] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde7e455-5848-4395-9b75-4003336c98d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.054195] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e532851-8476-40d9-a852-87ac58c4e2f7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.430405] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Successfully created port: 4d7f5eac-e81a-4528-84d0-1b530692cb68 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1756.134229] env[67008]: DEBUG nova.compute.manager [req-bd2e226a-88aa-47fb-bb0c-b09060e940b8 req-52567f30-932a-45aa-9fd9-e869209fa18a service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Received event network-vif-plugged-4d7f5eac-e81a-4528-84d0-1b530692cb68 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1756.134492] env[67008]: DEBUG oslo_concurrency.lockutils [req-bd2e226a-88aa-47fb-bb0c-b09060e940b8 req-52567f30-932a-45aa-9fd9-e869209fa18a service nova] Acquiring lock 
"5ecc1376-aab4-4b17-8746-39bed51edbba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1756.134649] env[67008]: DEBUG oslo_concurrency.lockutils [req-bd2e226a-88aa-47fb-bb0c-b09060e940b8 req-52567f30-932a-45aa-9fd9-e869209fa18a service nova] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1756.134846] env[67008]: DEBUG oslo_concurrency.lockutils [req-bd2e226a-88aa-47fb-bb0c-b09060e940b8 req-52567f30-932a-45aa-9fd9-e869209fa18a service nova] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1756.135047] env[67008]: DEBUG nova.compute.manager [req-bd2e226a-88aa-47fb-bb0c-b09060e940b8 req-52567f30-932a-45aa-9fd9-e869209fa18a service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] No waiting events found dispatching network-vif-plugged-4d7f5eac-e81a-4528-84d0-1b530692cb68 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1756.135220] env[67008]: WARNING nova.compute.manager [req-bd2e226a-88aa-47fb-bb0c-b09060e940b8 req-52567f30-932a-45aa-9fd9-e869209fa18a service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Received unexpected event network-vif-plugged-4d7f5eac-e81a-4528-84d0-1b530692cb68 for instance with vm_state building and task_state spawning. [ 1756.210479] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Successfully updated port: 4d7f5eac-e81a-4528-84d0-1b530692cb68 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1756.222043] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "refresh_cache-5ecc1376-aab4-4b17-8746-39bed51edbba" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1756.222198] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquired lock "refresh_cache-5ecc1376-aab4-4b17-8746-39bed51edbba" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1756.222389] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1756.262396] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance cache missing 
network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1756.422182] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Updating instance_info_cache with network_info: [{"id": "4d7f5eac-e81a-4528-84d0-1b530692cb68", "address": "fa:16:3e:3b:5c:9e", "network": {"id": "82026a86-5722-4042-840a-1bd892bac9d4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1653206647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2daf47b1f7e423791c0b03878567e11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d7f5eac-e8", "ovs_interfaceid": "4d7f5eac-e81a-4528-84d0-1b530692cb68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.433769] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Releasing lock "refresh_cache-5ecc1376-aab4-4b17-8746-39bed51edbba" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1756.434167] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance network_info: |[{"id": "4d7f5eac-e81a-4528-84d0-1b530692cb68", "address": "fa:16:3e:3b:5c:9e", "network": {"id": "82026a86-5722-4042-840a-1bd892bac9d4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1653206647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2daf47b1f7e423791c0b03878567e11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d7f5eac-e8", "ovs_interfaceid": "4d7f5eac-e81a-4528-84d0-1b530692cb68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1756.434560] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:5c:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3952eb02-1162-48ed-8227-9c138960d583', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d7f5eac-e81a-4528-84d0-1b530692cb68', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1756.442076] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Creating folder: Project (c2daf47b1f7e423791c0b03878567e11). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1756.442582] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-387956fc-1bab-4708-938a-e4837f5fd0dc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.454322] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Created folder: Project (c2daf47b1f7e423791c0b03878567e11) in parent group-v567993. [ 1756.454501] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Creating folder: Instances. Parent ref: group-v568086. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1756.454716] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6171b2ab-e3fc-4bbb-bc9d-4deea9100272 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.463554] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Created folder: Instances in parent group-v568086. [ 1756.463815] env[67008]: DEBUG oslo.service.loopingcall [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.463997] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1756.464208] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-771d6860-373a-410d-8130-16d1aa196772 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.482953] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1756.482953] env[67008]: value = "task-2824998" [ 1756.482953] env[67008]: _type = "Task" [ 1756.482953] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.490192] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824998, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.852727] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.995404] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2824998, 'name': CreateVM_Task, 'duration_secs': 0.419954} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.995682] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1756.996415] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1756.996637] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1756.996987] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1756.997294] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c7fe8be-e79d-4bcc-9aa7-6c5552513b29 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.001963] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Waiting for the task: (returnval){ [ 1757.001963] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5273bade-9bbb-af2b-7ca6-612151da149b" [ 1757.001963] env[67008]: _type = "Task" [ 1757.001963] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.012841] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5273bade-9bbb-af2b-7ca6-612151da149b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.512830] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1757.513191] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1757.513312] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1758.162527] env[67008]: DEBUG nova.compute.manager [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Received event network-changed-4d7f5eac-e81a-4528-84d0-1b530692cb68 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1758.162733] env[67008]: DEBUG nova.compute.manager [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Refreshing instance network info cache due to event network-changed-4d7f5eac-e81a-4528-84d0-1b530692cb68. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1758.162942] env[67008]: DEBUG oslo_concurrency.lockutils [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] Acquiring lock "refresh_cache-5ecc1376-aab4-4b17-8746-39bed51edbba" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1758.163100] env[67008]: DEBUG oslo_concurrency.lockutils [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] Acquired lock "refresh_cache-5ecc1376-aab4-4b17-8746-39bed51edbba" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1758.163260] env[67008]: DEBUG nova.network.neutron [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Refreshing network info cache for port 4d7f5eac-e81a-4528-84d0-1b530692cb68 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1758.478163] env[67008]: DEBUG nova.network.neutron [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Updated VIF entry in instance network info cache for port 4d7f5eac-e81a-4528-84d0-1b530692cb68. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1758.478531] env[67008]: DEBUG nova.network.neutron [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Updating instance_info_cache with network_info: [{"id": "4d7f5eac-e81a-4528-84d0-1b530692cb68", "address": "fa:16:3e:3b:5c:9e", "network": {"id": "82026a86-5722-4042-840a-1bd892bac9d4", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1653206647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c2daf47b1f7e423791c0b03878567e11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3952eb02-1162-48ed-8227-9c138960d583", "external-id": "nsx-vlan-transportzone-250", "segmentation_id": 250, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d7f5eac-e8", "ovs_interfaceid": "4d7f5eac-e81a-4528-84d0-1b530692cb68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.489574] env[67008]: DEBUG oslo_concurrency.lockutils [req-4b4ba9e7-461a-47d3-81f1-712e8832000e req-4de12e72-170b-4462-8201-374fc1d68e50 service nova] Releasing lock "refresh_cache-5ecc1376-aab4-4b17-8746-39bed51edbba" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1758.857566] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.857566] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances with incomplete migration {{(pid=67008) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1761.868617] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1761.868943] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1761.877818] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] There are 0 instances to clean {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1775.990211] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1776.009409] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Getting list of instances from cluster (obj){ [ 1776.009409] env[67008]: value = "domain-c8" [ 1776.009409] env[67008]: _type = "ClusterComputeResource" [ 1776.009409] env[67008]: } {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1776.010860] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c1e6ba-eccc-45f8-8ba0-4af03e53bc0e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.029461] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Got total of 10 instances {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1776.029623] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.029822] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 8632f87b-bab8-4df1-a403-a987b0769f8e {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.029979] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.030289] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 0c45068a-d333-4247-841d-bf40ebb779da {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.030479] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 7868f806-e64f-4964-9e1e-bcb8d29e685f {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.030634] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.030782] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 6ede77a1-aa76-4e9f-8beb-80131e7990da {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.030927] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.031244] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 260ddcc7-b12c-46f9-9c98-df270b438cd2 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.031324] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 5ecc1376-aab4-4b17-8746-39bed51edbba {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1776.031629] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring 
lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.031860] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "8632f87b-bab8-4df1-a403-a987b0769f8e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.032079] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.032286] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "0c45068a-d333-4247-841d-bf40ebb779da" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.032483] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.032675] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.032866] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.033112] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.033252] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1776.033451] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "5ecc1376-aab4-4b17-8746-39bed51edbba" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1800.179760] env[67008]: WARNING oslo_vmware.rw_handles [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1800.179760] env[67008]: ERROR oslo_vmware.rw_handles [ 1800.180683] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1800.182186] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1800.182430] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Copying Virtual Disk [datastore1] vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/60e91723-f8a8-4355-8159-576dee1ae9eb/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1800.182708] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-071a7b6d-8c18-4df2-87c6-291f9289d60d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.191494] env[67008]: DEBUG oslo_vmware.api [None 
req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 1800.191494] env[67008]: value = "task-2824999" [ 1800.191494] env[67008]: _type = "Task" [ 1800.191494] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.199343] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': task-2824999, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.703881] env[67008]: DEBUG oslo_vmware.exceptions [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1800.704197] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1800.704741] env[67008]: ERROR nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1800.704741] env[67008]: Faults: ['InvalidArgument'] [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Traceback (most recent call last): [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] yield resources [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self.driver.spawn(context, instance, image_meta, [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self._fetch_image_if_missing(context, vi) [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: 
efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] image_cache(vi, tmp_image_ds_loc) [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] vm_util.copy_virtual_disk( [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] session._wait_for_task(vmdk_copy_task) [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] return self.wait_for_task(task_ref) [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] return evt.wait() [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] result = hub.switch() [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] return self.greenlet.switch() [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self.f(*self.args, **self.kw) [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] raise exceptions.translate_fault(task_info.error) [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Faults: ['InvalidArgument'] [ 1800.704741] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] [ 1800.705822] env[67008]: INFO nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 
tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Terminating instance [ 1800.706602] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1800.706810] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1800.707057] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3568c6a4-f5b0-4d3c-9409-c5a28605423d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.709195] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1800.709335] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1800.710056] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91ed4e1-f9c2-45e1-bcf2-5d589f40c5d5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.716543] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1800.716751] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24b31dc5-c96b-4bfc-bee3-37d79d0f078c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.718771] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1800.718937] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1800.719880] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fad2b6ae-a983-4c1f-9f61-37a1a27c3498 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.724328] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Waiting for the task: (returnval){ [ 1800.724328] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520515a0-dcd6-d116-416e-058cd0fd1dab" [ 1800.724328] env[67008]: _type = "Task" [ 1800.724328] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.731317] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520515a0-dcd6-d116-416e-058cd0fd1dab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.783971] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1800.784253] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1800.784449] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Deleting the datastore file [datastore1] efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.784711] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a87e6302-fb82-438d-a0d5-018c6b5d6275 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.790841] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 1800.790841] env[67008]: value = "task-2825001" [ 1800.790841] env[67008]: _type = "Task" [ 1800.790841] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.798322] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': task-2825001, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.967327] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1801.237314] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1801.237314] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Creating directory with path [datastore1] vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1801.237314] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7665dc20-aa06-4b32-add2-9ea7629ef4a4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.249034] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Created directory with path [datastore1] vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1801.249034] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Fetch image to [datastore1] vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1801.249034] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1801.249296] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffecbc85-8fa5-4115-8812-745f1201218a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.255710] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48057e36-7d7a-48e6-a799-a900cc632177 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [
1801.264739] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef84313d-b99c-4712-a89b-55c931be4c99 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.299371] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b780f3-3e68-4c56-9f4a-836ae91029c2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.306189] env[67008]: DEBUG oslo_vmware.api [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': task-2825001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077286} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.307623] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.307810] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1801.307978] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1801.308166] env[67008]: INFO nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Took 0.60 seconds to destroy the instance on the hypervisor. 
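
The InvalidArgument/fileType failure recorded in the traceback above surfaces through oslo.vmware's task polling: nova submits CopyVirtualDisk_Task, then wait_for_task() re-polls the vCenter TaskInfo (producing the "progress is N%" DEBUG lines) until the task succeeds or errors, translating the fault into a VimFaultException. A minimal sketch of that flow, assuming an already-established oslo_vmware.api.VMwareAPISession and an already-submitted task reference; the copy spec itself is elided, and the function name is illustrative:

    # Sketch only, not nova's verbatim code. `session` is assumed to be an
    # established oslo_vmware.api.VMwareAPISession; `vmdk_copy_task` is the
    # reference returned by invoking CopyVirtualDisk_Task.
    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, vmdk_copy_task):
        try:
            # Polls TaskInfo every task_poll_interval seconds, emitting the
            # "Task: {'id': ..., 'name': CopyVirtualDisk_Task} progress is N%"
            # DEBUG lines, until the task reaches success or error state.
            session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as err:
            # For the failure above, err.fault_list == ['InvalidArgument'] and
            # the message carries "A specified parameter was not correct:
            # fileType"; nova lets it propagate out of spawn(), which is why
            # the same traceback reappears in _build_and_run_instance below.
            raise
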
[ 1801.309885] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-74c53785-523f-44df-8d3d-71b74f3f0626 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.311724] env[67008]: DEBUG nova.compute.claims [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1801.311897] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1801.312133] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1801.335024] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1801.389413] env[67008]: DEBUG oslo_vmware.rw_handles [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1801.447966] env[67008]: DEBUG oslo_vmware.rw_handles [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1801.448163] env[67008]: DEBUG oslo_vmware.rw_handles [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1801.526109] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9069e29d-3aba-4d9b-8595-35596ec363d2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.533534] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c362d1f-80d0-4a98-a229-ab6f4e32737d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.563616] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8d3022-ca8f-4d1b-87a5-b8859bf68fcb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.570169] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2a8b58-ee50-41c0-b3a3-d5df70cece27 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.582844] env[67008]: DEBUG nova.compute.provider_tree [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1801.591462] env[67008]: DEBUG nova.scheduler.client.report [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1801.605361] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1801.605882] env[67008]: ERROR nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1801.605882] env[67008]: Faults: ['InvalidArgument'] [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Traceback (most recent call last): [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1801.605882] 
env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self.driver.spawn(context, instance, image_meta, [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self._fetch_image_if_missing(context, vi) [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] image_cache(vi, tmp_image_ds_loc) [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] vm_util.copy_virtual_disk( [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] session._wait_for_task(vmdk_copy_task) [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] return self.wait_for_task(task_ref) [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] return evt.wait() [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] result = hub.switch() [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] return self.greenlet.switch() [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] self.f(*self.args, **self.kw) [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] raise exceptions.translate_fault(task_info.error) [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Faults: ['InvalidArgument'] [ 1801.605882] env[67008]: ERROR nova.compute.manager [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] [ 1801.606743] env[67008]: DEBUG nova.compute.utils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1801.607965] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Build of instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 was re-scheduled: A specified parameter was not correct: fileType [ 1801.607965] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1801.608367] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1801.608542] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1801.608721] env[67008]: DEBUG nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1801.608885] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1801.867330] env[67008]: DEBUG nova.network.neutron [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.880305] env[67008]: INFO nova.compute.manager [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Took 0.27 seconds to deallocate network for instance. [ 1801.988765] env[67008]: INFO nova.scheduler.client.report [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Deleted allocations for instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 [ 1802.012060] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a20854a3-7249-42ed-922f-4fc27aa98ec5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 591.461s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1802.012380] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 395.567s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1802.012602] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1802.012809] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1802.012978] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1802.014840] env[67008]: INFO nova.compute.manager [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Terminating instance [ 1802.016821] env[67008]: DEBUG nova.compute.manager [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1802.017025] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1802.017286] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7be68a4d-11f7-4ba0-b7ec-d89c9b1df9e0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.027537] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085ea16e-c36f-4f42-afe2-a1355589a60d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.057086] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3 could not be found. [ 1802.057312] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1802.057489] env[67008]: INFO nova.compute.manager [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1802.057723] env[67008]: DEBUG oslo.service.loopingcall [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.057944] env[67008]: DEBUG nova.compute.manager [-] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1802.058053] env[67008]: DEBUG nova.network.neutron [-] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1802.081638] env[67008]: DEBUG nova.network.neutron [-] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.089506] env[67008]: INFO nova.compute.manager [-] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] Took 0.03 seconds to deallocate network for instance. [ 1802.178680] env[67008]: DEBUG oslo_concurrency.lockutils [None req-4256c9f0-75b5-4b04-b1a9-63c056dbc5d5 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1802.179523] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 26.148s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1802.179713] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3] During sync_power_state the instance has a pending task (deleting). Skip. 
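
The oslo.service loopingcall entry above ("Waiting for function ... _deallocate_network_with_retries to return") shows Nova driving network deallocation through a looping call that re-runs its body until it succeeds or gives up. A schematic of that retry shape using the real oslo.service API (the helper, the attempt bound, and the stubbed deallocation call are assumptions for illustration, not Nova's actual code):

    from oslo_service import loopingcall

    MAX_ATTEMPTS = 3  # hypothetical bound; Nova computes its own retry policy

    def deallocate_network():
        # Stub standing in for the real Neutron deallocation call.
        pass

    def _deallocate_network_with_retries():
        attempts = {"n": 0}

        def _try_once():
            attempts["n"] += 1
            try:
                deallocate_network()
            except Exception:
                if attempts["n"] >= MAX_ATTEMPTS:
                    raise            # propagates out of timer.wait()
                return               # loop again after the interval
            raise loopingcall.LoopingCallDone()  # success: stop the loop

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        # start() returns an event; wait() blocks the caller, which is what
        # produces the "Waiting for function ... to return" DEBUG line.
        timer.start(interval=1.0).wait()

    _deallocate_network_with_retries()
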
[ 1802.179885] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "efafe4ff-ce0c-44e0-8bf8-5539bd9ca4f3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1803.900979] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.857820] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.869057] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1808.869272] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1808.869446] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1808.869595] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1808.870666] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2690d3e0-eee4-4db0-878d-1b19500f3684 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.879339] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3528f1b1-fb36-4da4-9124-11d86146f00f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.893329] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fb80b9-19f6-4935-a73f-d106a72718ac {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.899506] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca85a11d-3605-41a4-a735-64922eeecf57 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.927913] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181054MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1808.928071] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1808.928265] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1808.997966] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 8632f87b-bab8-4df1-a403-a987b0769f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998156] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998288] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998411] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998528] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998642] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998755] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998867] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.998977] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1808.999179] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1808.999318] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1809.015607] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing inventories for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1809.031183] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Updating ProviderTree inventory for provider ad100a41-192a-4a03-bdd9-0a78ce856705 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1809.031400] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1809.042501] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing aggregate associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, aggregates: None {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1809.060427] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing trait associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1809.169410] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042b263d-5c6d-4f3f-be8e-b09f22eb0b33 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.177173] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcac59b9-1cd5-4a2a-9d8a-00b8d9d7fae2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.207691] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b14413-ea68-4b01-a0fc-fda5394d6f85 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.214701] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a4c9bf-a913-4157-b67e-9bfa13e2c724 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.227673] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.236847] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1809.249932] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1809.250128] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.322s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 
1811.249544] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.250083] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.250083] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1811.857634] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.852402] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.856038] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.856205] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1812.856331] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1812.874733] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.874896] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875063] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875203] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875326] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875445] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875563] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875677] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875791] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1812.875907] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1814.856938] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1816.856649] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.955703] env[67008]: DEBUG oslo_concurrency.lockutils [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1838.285490] env[67008]: DEBUG oslo_concurrency.lockutils [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "5ecc1376-aab4-4b17-8746-39bed51edbba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1847.991158] env[67008]: WARNING oslo_vmware.rw_handles [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1847.991158] env[67008]: ERROR oslo_vmware.rw_handles [ 1847.991936] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1847.993554] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1847.993797] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Copying Virtual Disk [datastore1] vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/677499ff-be39-459f-9920-895d487f37dd/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1847.994093] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dc7973c-38ca-43e7-89f7-3b82133d99aa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.002182] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Waiting for the task: (returnval){ [ 1848.002182] env[67008]: value = "task-2825002" [ 1848.002182] env[67008]: _type = "Task" [ 1848.002182] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.009715] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Task: {'id': task-2825002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.513384] env[67008]: DEBUG oslo_vmware.exceptions [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1848.513792] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1848.514243] env[67008]: ERROR nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1848.514243] env[67008]: Faults: ['InvalidArgument'] [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Traceback (most recent call last): [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] yield resources [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self.driver.spawn(context, instance, image_meta, [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self._fetch_image_if_missing(context, vi) [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] image_cache(vi, tmp_image_ds_loc) [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] vm_util.copy_virtual_disk( [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] session._wait_for_task(vmdk_copy_task) [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] return self.wait_for_task(task_ref) [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] return evt.wait() [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] result = hub.switch() [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] return self.greenlet.switch() [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self.f(*self.args, **self.kw) [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] raise exceptions.translate_fault(task_info.error) [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Faults: ['InvalidArgument'] [ 1848.514243] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] [ 1848.515384] env[67008]: INFO nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Terminating instance [ 1848.516174] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1848.516380] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1848.516627] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-378fc20f-440c-4835-814b-73a1f7e8df36 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.519045] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1848.519241] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1848.519967] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69643ba-b3f1-45d4-80e5-16437aa5cbdd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.526523] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1848.526722] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1db72a2d-91e1-48ce-898e-eb8c3b727134 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.528832] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1848.529013] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1848.529948] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-164756f7-498c-4dba-b941-670bfce32a0f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.534696] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for the task: (returnval){ [ 1848.534696] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522e58f8-dc28-c646-9123-3d9ffb44181a" [ 1848.534696] env[67008]: _type = "Task" [ 1848.534696] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.541725] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522e58f8-dc28-c646-9123-3d9ffb44181a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.592196] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1848.592423] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1848.592625] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Deleting the datastore file [datastore1] 8632f87b-bab8-4df1-a403-a987b0769f8e {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1848.592889] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-910603bd-d118-49b1-882a-789323e18c77 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.599226] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Waiting for the task: (returnval){ [ 1848.599226] env[67008]: value = "task-2825004" [ 1848.599226] env[67008]: _type = "Task" [ 1848.599226] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.606703] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Task: {'id': task-2825004, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.044519] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1849.044897] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Creating directory with path [datastore1] vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1849.045033] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ef152f7-d997-4d84-a212-261d6a781fa5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.055842] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Created directory with path [datastore1] vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1849.056032] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Fetch image to [datastore1] vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1849.056205] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1849.056892] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba6a956-82f5-467a-8e72-885ef97f045b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.063200] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03980af-ea73-45c0-a536-2d7b65fae7b3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.071907] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d819ff2-d091-4c2b-8177-4698c9d9ffe4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.106837] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b7c203-c95a-4849-b069-997e4c8f73a8 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.114924] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-35ce3b50-8ce9-4604-a80c-fba864ef5596 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.116553] env[67008]: DEBUG oslo_vmware.api [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Task: {'id': task-2825004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073481} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.116789] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1849.116959] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1849.117138] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1849.117306] env[67008]: INFO nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Took 0.60 seconds to destroy the instance on the hypervisor. 
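
Each 'Waiting for the task: (returnval){ value = "task-..." } to complete' / "progress is N%" pair in this section is oslo.vmware polling a vCenter TaskInfo until it reaches a terminal state: on success _poll_task logs the duration_secs seen above (e.g. 0.073481 for task-2825004), and on failure it raises exceptions.translate_fault(task_info.error), which is exactly how the CopyVirtualDisk fileType fault surfaced as VimFaultException. A library-free schematic of that polling contract (the names and the TaskInfo stand-in are hypothetical, not oslo.vmware's actual implementation):

    import time

    class TaskFailed(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info() is a hypothetical callable returning an object with
        # .state in {"queued", "running", "success", "error"}, plus .progress
        # and .error, mirroring vSphere's TaskInfo structure.
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == "success":
                print("completed successfully in %.6fs" % (time.monotonic() - start))
                return info
            if info.state == "error":
                # oslo.vmware maps the raw VIM fault to a typed exception here
                # (exceptions.translate_fault); this sketch just wraps it.
                raise TaskFailed(info.error)
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(interval)

    # e.g. wait_for_task(lambda: session.get_task_info(task_ref))  # hypothetical session
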
[ 1849.119344] env[67008]: DEBUG nova.compute.claims [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1849.119532] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1849.119748] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1849.138047] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1849.188072] env[67008]: DEBUG oslo_vmware.rw_handles [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1849.247348] env[67008]: DEBUG oslo_vmware.rw_handles [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1849.247348] env[67008]: DEBUG oslo_vmware.rw_handles [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1849.323932] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034b2545-38ef-4679-881c-d60a503e55f3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.331204] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b79322-d32c-4345-80b2-779163724341 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.361086] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64b710c-bba4-4de7-9142-d1f08e220e06 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.367768] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d598b0ba-50b3-4f59-9456-609dcda8ac15 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.381241] env[67008]: DEBUG nova.compute.provider_tree [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.389561] env[67008]: DEBUG nova.scheduler.client.report [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1849.405291] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.285s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1849.405793] env[67008]: ERROR nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1849.405793] env[67008]: Faults: ['InvalidArgument'] [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Traceback (most recent call last): [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1849.405793] env[67008]: ERROR nova.compute.manager 
[instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self.driver.spawn(context, instance, image_meta, [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self._fetch_image_if_missing(context, vi) [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] image_cache(vi, tmp_image_ds_loc) [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] vm_util.copy_virtual_disk( [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] session._wait_for_task(vmdk_copy_task) [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] return self.wait_for_task(task_ref) [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] return evt.wait() [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] result = hub.switch() [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] return self.greenlet.switch() [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] self.f(*self.args, **self.kw) [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] raise exceptions.translate_fault(task_info.error) [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Faults: ['InvalidArgument'] [ 1849.405793] env[67008]: ERROR nova.compute.manager [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] [ 1849.406827] env[67008]: DEBUG nova.compute.utils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1849.407844] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Build of instance 8632f87b-bab8-4df1-a403-a987b0769f8e was re-scheduled: A specified parameter was not correct: fileType [ 1849.407844] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1849.408244] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1849.408416] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1849.408584] env[67008]: DEBUG nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1849.408747] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1850.114136] env[67008]: DEBUG nova.network.neutron [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.124196] env[67008]: INFO nova.compute.manager [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Took 0.72 seconds to deallocate network for instance. [ 1850.232838] env[67008]: INFO nova.scheduler.client.report [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Deleted allocations for instance 8632f87b-bab8-4df1-a403-a987b0769f8e [ 1850.253725] env[67008]: DEBUG oslo_concurrency.lockutils [None req-88a0fd2b-3d9f-4b20-9afa-b2971bb68702 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.570s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1850.253988] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.348s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1850.254215] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Acquiring lock "8632f87b-bab8-4df1-a403-a987b0769f8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1850.254421] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1850.254621] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1850.257023] env[67008]: INFO nova.compute.manager [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Terminating instance [ 1850.258744] env[67008]: DEBUG nova.compute.manager [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1850.258957] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1850.259451] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f6688b8-9006-44f3-b6e5-66ca7d1d772c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.269272] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17618a8f-9019-48d3-8d07-0eb851e55241 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.297402] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8632f87b-bab8-4df1-a403-a987b0769f8e could not be found. [ 1850.297608] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1850.297785] env[67008]: INFO nova.compute.manager [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1850.298036] env[67008]: DEBUG oslo.service.loopingcall [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.298271] env[67008]: DEBUG nova.compute.manager [-] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1850.298371] env[67008]: DEBUG nova.network.neutron [-] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1850.328470] env[67008]: DEBUG nova.network.neutron [-] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.336398] env[67008]: INFO nova.compute.manager [-] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] Took 0.04 seconds to deallocate network for instance. [ 1850.413095] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a3d54ab6-28b3-457c-8362-be2e7910f383 tempest-ServerRescueTestJSON-542889244 tempest-ServerRescueTestJSON-542889244-project-member] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1850.413908] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 74.382s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1850.414114] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 8632f87b-bab8-4df1-a403-a987b0769f8e] During sync_power_state the instance has a pending task (deleting). Skip. 
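The traceback above bottoms out in oslo.vmware's task polling: nova's _wait_for_task blocks on an event while a looping call repeatedly reads the vCenter task state, and _poll_task raises exceptions.translate_fault(task_info.error) as soon as the task reports an error. That is how the CopyVirtualDisk_Task fault ("A specified parameter was not correct: fileType") surfaces as VimFaultException and aborts the build. A minimal sketch of that poll-and-translate pattern, with stand-in names (get_task_info and TaskFault are illustrative, not the real oslo.vmware API):

    import time

    class TaskFault(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException.
        pass

    def wait_for_task(task_ref, get_task_info, interval=0.5):
        # Poll until the task reaches a terminal state, mirroring the loop
        # visible in the traceback (oslo_vmware/api.py, _poll_task): an
        # 'error' state is translated into an exception, not returned.
        while True:
            info = get_task_info(task_ref)   # one RetrievePropertiesEx round-trip
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # oslo.vmware calls exceptions.translate_fault(task_info.error)
                # here; in this log that produced Faults: ['InvalidArgument'].
                raise TaskFault(info["error"])
            time.sleep(interval)             # the real code yields via eventlet

Once the exception propagates out of spawn(), the manager releases the compute_resources claim, re-schedules the instance, and deallocates its networks — exactly the sequence logged above.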
[ 1850.414290] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "8632f87b-bab8-4df1-a403-a987b0769f8e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1864.857979] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.857576] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.873392] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1870.873854] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1870.873854] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1870.873980] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1870.876714] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc39339-a155-48be-acd2-3b578076d430 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.890566] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b5ebf4-fe5e-44eb-8a96-43a7c8a57ec7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.906864] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a384f2d-739a-4e81-918d-1c2573306b2e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.913654] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bce31bb-1d14-477d-834d-e2af439678e3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.942797] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181038MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1870.943038] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1870.943171] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1871.030221] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.030381] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.030511] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.030623] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.030740] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.030926] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.030982] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.031084] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1871.031272] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1871.031406] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1871.147516] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89052a9-3926-4892-97a0-c4c301875e66 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.156021] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38cdec7-ce61-47a1-abe0-7d0ca9178f7c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.187165] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9f6324-d913-460f-8c41-93586e031946 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.194248] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5523db-9f38-4122-ac3e-a20cf230a820 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.207225] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1871.215678] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1871.229530] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1871.229731] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.287s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1871.343167] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "54e33842-40a5-48e5-8813-f2da4f9fc152" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1871.343389] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1871.357841] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1871.406813] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1871.407089] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1871.408883] env[67008]: INFO nova.compute.claims [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.597822] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2120e484-771e-4296-ad01-716c34352b1d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.606461] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d446d5c-0142-4a32-a9c4-68f80591fcd7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.643318] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e26a43f-fd40-4f0c-aac3-f819d9b867ec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.654512] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "64ed3874-13e7-495e-9676-1757f27a1256" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1871.654793] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "64ed3874-13e7-495e-9676-1757f27a1256" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1871.657665] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c41a35-c95f-4f48-8849-14fc9f51f552 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.674290] env[67008]: DEBUG nova.compute.provider_tree [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed in 
ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1871.677768] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1871.684204] env[67008]: DEBUG nova.scheduler.client.report [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1871.698372] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1871.698899] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1871.742973] env[67008]: DEBUG nova.compute.utils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1871.748020] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1871.748020] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1871.753641] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Start building block device mappings for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1871.762051] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1871.762307] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1871.764034] env[67008]: INFO nova.compute.claims [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1871.820901] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1871.850971] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1871.851344] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1871.851440] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1871.851592] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] 
Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1871.851790] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1871.851880] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1871.852135] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1871.852329] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1871.852501] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1871.852693] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1871.852899] env[67008]: DEBUG nova.virt.hardware [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1871.854101] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cecc21-854e-4a68-a2f6-d91d5c3071b0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.863933] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e049357e-6043-4d09-b99f-f0d96162de5f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.885034] env[67008]: DEBUG nova.policy [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4aaef0ae8e6f4b44bdade57ecabd89c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'c9c416d00db7419cb441a28d6db8771e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1871.991933] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787f4cfa-e3ff-4970-b07c-919eed730b2a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.999591] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee106944-3737-4744-a452-08fdc8704d2c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.029529] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e695e3-9451-457b-b83e-a63b68c55b3b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.036689] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96eeb84e-8e0f-432d-8f2a-c1abcafe981a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.050112] env[67008]: DEBUG nova.compute.provider_tree [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.060164] env[67008]: DEBUG nova.scheduler.client.report [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1872.083801] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.321s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1872.084305] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1872.121171] env[67008]: DEBUG nova.compute.utils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1872.122410] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1872.122625] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1872.157636] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1872.189072] env[67008]: DEBUG nova.policy [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4aaef0ae8e6f4b44bdade57ecabd89c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9c416d00db7419cb441a28d6db8771e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1872.202269] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1872.202844] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1872.229036] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.229265] env[67008]: DEBUG 
oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.229413] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1872.234178] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1872.259490] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1872.259733] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1872.259891] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1872.260082] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1872.260230] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1872.260377] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1872.260577] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1872.260732] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1872.260893] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1872.261061] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1872.261235] env[67008]: DEBUG nova.virt.hardware [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1872.262251] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29188df-fa39-4d02-84af-ba0d677e3d03 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.270793] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa1facd-7ad0-4934-8de8-af8326f6a191 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.469970] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Successfully created port: 10906cfd-3378-4f94-8514-f9b0bbeaa6b7 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1872.476238] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Successfully created port: 71a23169-97f0-4895-9698-8180c94de576 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1872.852714] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.009669] env[67008]: DEBUG 
nova.compute.manager [req-47f07983-133a-448e-92eb-8eba0b45ef27 req-19519274-ae3d-43f8-b08b-ee5c78fdda8c service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Received event network-vif-plugged-71a23169-97f0-4895-9698-8180c94de576 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1873.010375] env[67008]: DEBUG oslo_concurrency.lockutils [req-47f07983-133a-448e-92eb-8eba0b45ef27 req-19519274-ae3d-43f8-b08b-ee5c78fdda8c service nova] Acquiring lock "64ed3874-13e7-495e-9676-1757f27a1256-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1873.010617] env[67008]: DEBUG oslo_concurrency.lockutils [req-47f07983-133a-448e-92eb-8eba0b45ef27 req-19519274-ae3d-43f8-b08b-ee5c78fdda8c service nova] Lock "64ed3874-13e7-495e-9676-1757f27a1256-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1873.010772] env[67008]: DEBUG oslo_concurrency.lockutils [req-47f07983-133a-448e-92eb-8eba0b45ef27 req-19519274-ae3d-43f8-b08b-ee5c78fdda8c service nova] Lock "64ed3874-13e7-495e-9676-1757f27a1256-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1873.010943] env[67008]: DEBUG nova.compute.manager [req-47f07983-133a-448e-92eb-8eba0b45ef27 req-19519274-ae3d-43f8-b08b-ee5c78fdda8c service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] No waiting events found dispatching network-vif-plugged-71a23169-97f0-4895-9698-8180c94de576 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1873.011561] env[67008]: WARNING nova.compute.manager [req-47f07983-133a-448e-92eb-8eba0b45ef27 req-19519274-ae3d-43f8-b08b-ee5c78fdda8c service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Received unexpected event network-vif-plugged-71a23169-97f0-4895-9698-8180c94de576 for instance with vm_state building and task_state spawning. 
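The lockutils triplet around pop_instance_event above shows the event-dispatch pattern: the per-instance "-events" lock is held only long enough to pop a waiter registered for that event name, and when nothing is waiting the manager logs "No waiting events found dispatching ..." followed by the "Received unexpected event" warning — here the network-vif-plugged event simply arrived while the instance was still building, before (or without) a registered waiter. A rough sketch of that pop-under-lock idea, with illustrative names and plain threading in place of Nova's real structures:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()  # plays the role of the "<uuid>-events" lock
            self._waiters = {}             # {instance_uuid: {event_name: threading.Event}}

        def prepare_event(self, uuid, name):
            # A waiter registers before blocking, so a later pop can find it.
            with self._lock:
                evt = threading.Event()
                self._waiters.setdefault(uuid, {})[name] = evt
            return evt

        def pop_and_signal(self, uuid, name):
            # Mirrors pop_instance_event: hold the lock just long enough to pop.
            with self._lock:
                evt = self._waiters.get(uuid, {}).pop(name, None)
            if evt is None:
                # Corresponds to "No waiting events found dispatching ..."
                # and the "Received unexpected event" warning in the log.
                return False
            evt.set()  # wakes whoever is blocked waiting for this event
            return True

As the subsequent records show, the spawn continues regardless: the compute manager acquires the refresh_cache lock, rebuilds the network info cache for the port, and proceeds.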
[ 1873.095938] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Successfully updated port: 71a23169-97f0-4895-9698-8180c94de576 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1873.111884] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "refresh_cache-64ed3874-13e7-495e-9676-1757f27a1256" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1873.112075] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "refresh_cache-64ed3874-13e7-495e-9676-1757f27a1256" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1873.112230] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1873.121702] env[67008]: DEBUG nova.compute.manager [req-dfacbce1-ee29-48ae-8416-465e3493a29d req-194a1b14-cd5b-44b9-9d8f-e8aea665f5ad service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Received event network-vif-plugged-10906cfd-3378-4f94-8514-f9b0bbeaa6b7 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1873.123016] env[67008]: DEBUG oslo_concurrency.lockutils [req-dfacbce1-ee29-48ae-8416-465e3493a29d req-194a1b14-cd5b-44b9-9d8f-e8aea665f5ad service nova] Acquiring lock "54e33842-40a5-48e5-8813-f2da4f9fc152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1873.123016] env[67008]: DEBUG oslo_concurrency.lockutils [req-dfacbce1-ee29-48ae-8416-465e3493a29d req-194a1b14-cd5b-44b9-9d8f-e8aea665f5ad service nova] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1873.123016] env[67008]: DEBUG oslo_concurrency.lockutils [req-dfacbce1-ee29-48ae-8416-465e3493a29d req-194a1b14-cd5b-44b9-9d8f-e8aea665f5ad service nova] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1873.123016] env[67008]: DEBUG nova.compute.manager [req-dfacbce1-ee29-48ae-8416-465e3493a29d req-194a1b14-cd5b-44b9-9d8f-e8aea665f5ad service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] No waiting events found dispatching network-vif-plugged-10906cfd-3378-4f94-8514-f9b0bbeaa6b7 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1873.123016] env[67008]: WARNING nova.compute.manager 
[req-dfacbce1-ee29-48ae-8416-465e3493a29d req-194a1b14-cd5b-44b9-9d8f-e8aea665f5ad service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Received unexpected event network-vif-plugged-10906cfd-3378-4f94-8514-f9b0bbeaa6b7 for instance with vm_state building and task_state spawning. [ 1873.159126] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1873.206342] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Successfully updated port: 10906cfd-3378-4f94-8514-f9b0bbeaa6b7 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1873.214929] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "refresh_cache-54e33842-40a5-48e5-8813-f2da4f9fc152" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1873.215082] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "refresh_cache-54e33842-40a5-48e5-8813-f2da4f9fc152" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1873.215230] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1873.254030] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1873.318929] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Updating instance_info_cache with network_info: [{"id": "71a23169-97f0-4895-9698-8180c94de576", "address": "fa:16:3e:7f:b6:be", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a23169-97", "ovs_interfaceid": "71a23169-97f0-4895-9698-8180c94de576", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.331411] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "refresh_cache-64ed3874-13e7-495e-9676-1757f27a1256" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1873.331727] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance network_info: |[{"id": "71a23169-97f0-4895-9698-8180c94de576", "address": "fa:16:3e:7f:b6:be", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a23169-97", "ovs_interfaceid": "71a23169-97f0-4895-9698-8180c94de576", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1873.332073] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:b6:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71a23169-97f0-4895-9698-8180c94de576', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1873.339405] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating folder: Project (c9c416d00db7419cb441a28d6db8771e). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1873.339909] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c663a8d-3f39-461c-94b5-fbf3699de048 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.350864] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created folder: Project (c9c416d00db7419cb441a28d6db8771e) in parent group-v567993. [ 1873.351059] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating folder: Instances. Parent ref: group-v568089. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1873.351279] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-363385f8-88f4-41d8-aa78-588e92402276 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.359459] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created folder: Instances in parent group-v568089. [ 1873.359672] env[67008]: DEBUG oslo.service.loopingcall [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.359848] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1873.360046] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b694619-f879-4224-80e9-3440a9c4dd19 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.380565] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1873.380565] env[67008]: value = "task-2825007" [ 1873.380565] env[67008]: _type = "Task" [ 1873.380565] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.388105] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825007, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.426502] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Updating instance_info_cache with network_info: [{"id": "10906cfd-3378-4f94-8514-f9b0bbeaa6b7", "address": "fa:16:3e:82:2d:37", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10906cfd-33", "ovs_interfaceid": "10906cfd-3378-4f94-8514-f9b0bbeaa6b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.437980] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "refresh_cache-54e33842-40a5-48e5-8813-f2da4f9fc152" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1873.438358] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance network_info: |[{"id": "10906cfd-3378-4f94-8514-f9b0bbeaa6b7", "address": "fa:16:3e:82:2d:37", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": 
"tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10906cfd-33", "ovs_interfaceid": "10906cfd-3378-4f94-8514-f9b0bbeaa6b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1873.439061] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:2d:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10906cfd-3378-4f94-8514-f9b0bbeaa6b7', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1873.447369] env[67008]: DEBUG oslo.service.loopingcall [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.447892] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1873.448289] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de9c91cb-56b5-4b2c-83a8-e9659705b5e7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.469952] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1873.469952] env[67008]: value = "task-2825008" [ 1873.469952] env[67008]: _type = "Task" [ 1873.469952] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.479548] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825008, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.856947] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.890650] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825007, 'name': CreateVM_Task, 'duration_secs': 0.302291} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.890816] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1873.900826] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1873.901008] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1873.901373] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1873.901629] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1a37960-7cde-44fd-a416-36b8eeb64eea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.906201] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 1873.906201] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d9860c-bc9c-609a-719a-3ad0fae8095b" [ 1873.906201] env[67008]: _type = "Task" [ 1873.906201] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.916277] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d9860c-bc9c-609a-719a-3ad0fae8095b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.979201] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825008, 'name': CreateVM_Task, 'duration_secs': 0.289177} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.979368] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1873.979982] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1874.417023] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1874.417388] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1874.417435] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1874.417633] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1874.417945] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1874.418210] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e24474b3-393d-444c-87ff-650e62954542 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.422378] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 1874.422378] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5250bf09-27c1-8ab2-cb17-b88dc4dff6e0" [ 1874.422378] env[67008]: _type = "Task" [ 1874.422378] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.429645] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5250bf09-27c1-8ab2-cb17-b88dc4dff6e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.857199] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1874.857375] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1874.857499] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1874.878759] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.878915] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879032] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879163] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879287] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879408] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879527] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879645] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879766] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879882] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1874.879999] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1874.932866] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1874.933154] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1874.933379] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1875.036090] env[67008]: DEBUG nova.compute.manager [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Received event network-changed-71a23169-97f0-4895-9698-8180c94de576 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1875.036228] env[67008]: DEBUG nova.compute.manager [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Refreshing instance network info cache due to event network-changed-71a23169-97f0-4895-9698-8180c94de576. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1875.036419] env[67008]: DEBUG oslo_concurrency.lockutils [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] Acquiring lock "refresh_cache-64ed3874-13e7-495e-9676-1757f27a1256" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1875.036563] env[67008]: DEBUG oslo_concurrency.lockutils [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] Acquired lock "refresh_cache-64ed3874-13e7-495e-9676-1757f27a1256" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1875.036720] env[67008]: DEBUG nova.network.neutron [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Refreshing network info cache for port 71a23169-97f0-4895-9698-8180c94de576 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1875.152381] env[67008]: DEBUG nova.compute.manager [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Received event network-changed-10906cfd-3378-4f94-8514-f9b0bbeaa6b7 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1875.152381] env[67008]: DEBUG nova.compute.manager [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Refreshing instance network info cache due to event network-changed-10906cfd-3378-4f94-8514-f9b0bbeaa6b7. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1875.152548] env[67008]: DEBUG oslo_concurrency.lockutils [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] Acquiring lock "refresh_cache-54e33842-40a5-48e5-8813-f2da4f9fc152" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1875.152709] env[67008]: DEBUG oslo_concurrency.lockutils [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] Acquired lock "refresh_cache-54e33842-40a5-48e5-8813-f2da4f9fc152" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1875.152869] env[67008]: DEBUG nova.network.neutron [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Refreshing network info cache for port 10906cfd-3378-4f94-8514-f9b0bbeaa6b7 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1875.371111] env[67008]: DEBUG nova.network.neutron [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Updated VIF entry in instance network info cache for port 71a23169-97f0-4895-9698-8180c94de576. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1875.371449] env[67008]: DEBUG nova.network.neutron [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Updating instance_info_cache with network_info: [{"id": "71a23169-97f0-4895-9698-8180c94de576", "address": "fa:16:3e:7f:b6:be", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71a23169-97", "ovs_interfaceid": "71a23169-97f0-4895-9698-8180c94de576", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.380669] env[67008]: DEBUG oslo_concurrency.lockutils [req-e6716d21-3d78-4d02-bb93-423c9b1a9f80 req-6ab3c66b-0f3f-4bc0-82cf-1643a64ea943 service nova] Releasing lock "refresh_cache-64ed3874-13e7-495e-9676-1757f27a1256" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1875.397437] env[67008]: DEBUG nova.network.neutron [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Updated VIF entry in instance network info cache for port 10906cfd-3378-4f94-8514-f9b0bbeaa6b7. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1875.397751] env[67008]: DEBUG nova.network.neutron [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Updating instance_info_cache with network_info: [{"id": "10906cfd-3378-4f94-8514-f9b0bbeaa6b7", "address": "fa:16:3e:82:2d:37", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10906cfd-33", "ovs_interfaceid": "10906cfd-3378-4f94-8514-f9b0bbeaa6b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.406136] env[67008]: DEBUG oslo_concurrency.lockutils [req-ff848009-6a11-4324-8e25-0cc921308f85 req-cdb6fb3c-58c0-4c22-b6c2-1d980db43bcc service nova] Releasing lock "refresh_cache-54e33842-40a5-48e5-8813-f2da4f9fc152" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1876.856571] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.856850] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1880.853422] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.826068] env[67008]: WARNING oslo_vmware.rw_handles [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles 
File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1897.826068] env[67008]: ERROR oslo_vmware.rw_handles [ 1897.826068] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1897.830017] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1897.830017] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Copying Virtual Disk [datastore1] vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/2352ceaf-cb3f-4003-9cb5-7cc828de31da/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1897.830017] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15981452-81ea-4ba1-ab27-d13fa5d2291b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.839501] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for the task: (returnval){ [ 1897.839501] env[67008]: value = "task-2825009" [ 1897.839501] env[67008]: _type = "Task" [ 1897.839501] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.848616] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Task: {'id': task-2825009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.350808] env[67008]: DEBUG oslo_vmware.exceptions [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1898.351773] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1898.351928] env[67008]: ERROR nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1898.351928] env[67008]: Faults: ['InvalidArgument'] [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Traceback (most recent call last): [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] yield resources [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self.driver.spawn(context, instance, image_meta, [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._fetch_image_if_missing(context, vi) [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] image_cache(vi, tmp_image_ds_loc) [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] vm_util.copy_virtual_disk( [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] session._wait_for_task(vmdk_copy_task) [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.wait_for_task(task_ref) [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return evt.wait() [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] result = hub.switch() [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.greenlet.switch() [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self.f(*self.args, **self.kw) [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] raise exceptions.translate_fault(task_info.error) [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Faults: ['InvalidArgument'] [ 1898.351928] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] [ 1898.353117] env[67008]: INFO nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Terminating instance [ 1898.354432] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1898.354432] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.354432] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2357fa8-fda5-45d5-a7ef-16ac8fec6864 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.356366] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1898.356525] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquired lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1898.356689] env[67008]: DEBUG nova.network.neutron [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1898.363669] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.363840] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1898.364554] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c8b9b16-d2be-446d-a18f-79855f1ac050 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.373051] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for the task: (returnval){ [ 1898.373051] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]525689ee-164d-0e55-dbc6-16813fa1b8a8" [ 1898.373051] env[67008]: _type = "Task" [ 1898.373051] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.382608] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]525689ee-164d-0e55-dbc6-16813fa1b8a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.386774] env[67008]: DEBUG nova.network.neutron [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1898.472925] env[67008]: DEBUG nova.network.neutron [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.483159] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Releasing lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1898.483557] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1898.483745] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1898.484845] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef4a7ec-b56b-4e14-a3e8-2642ebbba2ab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.493459] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1898.493680] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca878071-5d5d-45bf-8042-34245b22eb57 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.531140] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1898.531376] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1898.531559] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Deleting the datastore file [datastore1] ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1898.531821] env[67008]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4de859b0-0931-43a3-b27f-6e151bcb826a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.538877] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for the task: (returnval){ [ 1898.538877] env[67008]: value = "task-2825011" [ 1898.538877] env[67008]: _type = "Task" [ 1898.538877] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.547053] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Task: {'id': task-2825011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.883986] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1898.884392] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Creating directory with path [datastore1] vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.884499] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81b02e17-1716-4182-bba3-9f4c2821f9dc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.896287] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Created directory with path [datastore1] vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.896472] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Fetch image to [datastore1] vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1898.896645] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1898.897394] env[67008]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d19478a-a4ed-4c86-892e-47e45f749664 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.904122] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfe7ee2-7ae2-42f6-b18a-9c0a648753d5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.913327] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bccba49-33d8-4f3c-a9ea-47e52a558ee6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.951875] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cc6d26-0e73-4c4f-af03-8be2f3e835ac {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.959153] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f36e4fff-bb9e-4fd3-9ce5-a8500f6808cb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.981669] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1899.033988] env[67008]: DEBUG oslo_vmware.rw_handles [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1899.093121] env[67008]: DEBUG oslo_vmware.rw_handles [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1899.093297] env[67008]: DEBUG oslo_vmware.rw_handles [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1899.097815] env[67008]: DEBUG oslo_vmware.api [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Task: {'id': task-2825011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03546} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.098061] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1899.098252] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1899.098423] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1899.098615] env[67008]: INFO nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1899.098837] env[67008]: DEBUG oslo.service.loopingcall [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1899.099039] env[67008]: DEBUG nova.compute.manager [-] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1899.101165] env[67008]: DEBUG nova.compute.claims [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1899.101334] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1899.101549] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1899.264149] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eab64f5-9c8d-4abe-956c-e39baf20b9d7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.272139] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853c2ac8-4ea5-4a98-957a-a2fbb57c1c68 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.301573] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4baa78cd-3b00-4e0f-8b0a-0250297286c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.308790] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df03b8c6-8eb9-4a3f-9152-14eda51218a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.323534] env[67008]: DEBUG nova.compute.provider_tree [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.331913] env[67008]: DEBUG nova.scheduler.client.report [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1899.345097] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 
tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.243s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.345607] env[67008]: ERROR nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1899.345607] env[67008]: Faults: ['InvalidArgument'] [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Traceback (most recent call last): [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self.driver.spawn(context, instance, image_meta, [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._fetch_image_if_missing(context, vi) [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] image_cache(vi, tmp_image_ds_loc) [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] vm_util.copy_virtual_disk( [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] session._wait_for_task(vmdk_copy_task) [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.wait_for_task(task_ref) [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return evt.wait() [ 1899.345607] env[67008]: ERROR 
nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] result = hub.switch() [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.greenlet.switch() [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self.f(*self.args, **self.kw) [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] raise exceptions.translate_fault(task_info.error) [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Faults: ['InvalidArgument'] [ 1899.345607] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] [ 1899.346492] env[67008]: DEBUG nova.compute.utils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1899.348033] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Build of instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f was re-scheduled: A specified parameter was not correct: fileType [ 1899.348033] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1899.348430] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1899.348651] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1899.348799] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquired lock 
"refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1899.348958] env[67008]: DEBUG nova.network.neutron [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1899.371270] env[67008]: DEBUG nova.network.neutron [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1899.429581] env[67008]: DEBUG nova.network.neutron [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.438268] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Releasing lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1899.438558] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1899.438715] env[67008]: DEBUG nova.compute.manager [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1899.523566] env[67008]: INFO nova.scheduler.client.report [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Deleted allocations for instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f [ 1899.542551] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ddeb1b44-73f5-4b42-a2d5-1ddb2eb61742 tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 611.654s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.543588] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 416.083s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1899.543806] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1899.544024] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1899.544193] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.546014] env[67008]: INFO nova.compute.manager [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Terminating instance [ 1899.547484] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquiring lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1899.547641] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Acquired lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" 
{{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1899.547802] env[67008]: DEBUG nova.network.neutron [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1899.557410] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Starting instance... {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1899.571896] env[67008]: DEBUG nova.network.neutron [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1899.608614] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1899.608854] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1899.610309] env[67008]: INFO nova.compute.claims [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1899.629902] env[67008]: DEBUG nova.network.neutron [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.637683] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Releasing lock "refresh_cache-ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1899.638101] env[67008]: DEBUG nova.compute.manager [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1899.638296] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1899.638776] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e1d7bc1-9016-4ea8-aabe-e95afa7a8741 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.649165] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37268631-902c-416f-9a2d-db2378beb877 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.681883] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f could not be found. [ 1899.682102] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1899.682277] env[67008]: INFO nova.compute.manager [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1899.682510] env[67008]: DEBUG oslo.service.loopingcall [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1899.684895] env[67008]: DEBUG nova.compute.manager [-] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1899.684999] env[67008]: DEBUG nova.network.neutron [-] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1899.791526] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc77cae5-cfc7-4346-b745-e7125f38eefa {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.799989] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd88c2bd-ca00-4f6e-9940-f5ed633304c7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.830916] env[67008]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=67008) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1899.831176] env[67008]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1899.831682] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-635b8154-880a-44e6-ab11-4696923afc99'] [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1899.831682] env[67008]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1899.831682] env[67008]: ERROR oslo.service.loopingcall [ 1899.833418] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9979428-36d8-4822-a056-5d7c36523101 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.835169] env[67008]: ERROR nova.compute.manager [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
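[editor's note] The deallocation failure above is raised from inside a retry wrapper: the traceback shows _deallocate_network_with_retries being driven by oslo_service.loopingcall.RetryDecorator, which re-runs the wrapped function in a DynamicLoopingCall but only retries the exception types it was constructed with, so the 401-derived NeutronAdminCredentialConfigurationInvalid escapes on the first attempt and surfaces as the "Dynamic interval looping call ... failed" record. A minimal runnable sketch of that pattern follows; the function body and exception class are hypothetical, only the RetryDecorator API is oslo.service's:

from oslo_service import loopingcall


class TransientNetworkError(Exception):
    """Hypothetical stand-in for an error type worth retrying."""


# Only exceptions listed in `exceptions` are retried; anything else is
# re-raised immediately, logged by the looping call as
# "Dynamic interval looping call ... failed", and re-raised to the caller.
@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=30,
                            exceptions=(TransientNetworkError,))
def deallocate_network_with_retries():
    # Hypothetical body: a credential/401-style failure is not in
    # `exceptions`, so it is not retried.
    raise RuntimeError("unauthorized")


try:
    deallocate_network_with_retries()
except RuntimeError as exc:
    print("looping call failed without retrying:", exc)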
[ 1899.843544] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79c3e5e-1209-4183-91d4-b16a61398a3a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.858167] env[67008]: DEBUG nova.compute.provider_tree [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.865496] env[67008]: ERROR nova.compute.manager [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Traceback (most recent call last): [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] ret = obj(*args, **kwargs) [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] exception_handler_v20(status_code, error_body) [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] raise client_exc(message=error_message, [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Neutron server returns request_ids: ['req-635b8154-880a-44e6-ab11-4696923afc99'] [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] During handling of the above exception, another exception occurred: [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Traceback (most recent call last): [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._delete_instance(context, instance, bdms) [ 
1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._shutdown_instance(context, instance, bdms) [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._try_deallocate_network(context, instance, requested_networks) [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] with excutils.save_and_reraise_exception(): [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self.force_reraise() [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] raise self.value [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] _deallocate_network_with_retries() [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return evt.wait() [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] result = hub.switch() [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.greenlet.switch() [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] result = func(*self.args, **self.kw) [ 1899.865496] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in 
_func [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] result = f(*args, **kwargs) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._deallocate_network( [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self.network_api.deallocate_for_instance( [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] data = neutron.list_ports(**search_opts) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] ret = obj(*args, **kwargs) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.list('ports', self.ports_path, retrieve_all, [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] ret = obj(*args, **kwargs) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] for r in self._pagination(collection, path, **params): [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] res = self.get(path, params=params) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] ret = obj(*args, **kwargs) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.retry_request("GET", action, body=body, [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: 
ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] ret = obj(*args, **kwargs) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] return self.do_request(method, action, body=body, [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] ret = obj(*args, **kwargs) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] self._handle_fault_response(status_code, replybody, resp) [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1899.866791] env[67008]: ERROR nova.compute.manager [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] [ 1899.868543] env[67008]: DEBUG nova.scheduler.client.report [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1899.880862] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.272s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.881342] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Start building networks asynchronously for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1899.904312] env[67008]: DEBUG oslo_concurrency.lockutils [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.361s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.905415] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 123.873s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1899.905595] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1899.905761] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1899.921827] env[67008]: DEBUG nova.compute.utils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.923022] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1899.923199] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1899.934268] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1899.959212] env[67008]: INFO nova.compute.manager [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] [instance: ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f] Successfully reverted task state from None on failure for instance. 
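[editor's note] The "acquired ... waited 416.083s" / "released ... held 0.361s" bookkeeping above is emitted by oslo.concurrency's lockutils decorator, which Nova uses to serialize per-instance operations on the instance UUID: do_terminate_instance could only start once _locked_do_build_and_run_instance dropped the same lock, and query_driver_power_state_and_sync queued behind the terminate path in turn. A minimal sketch of that pattern, assuming Nova's wrapper is built via lockutils.synchronized_with_prefix('nova-'); the lock name and function body are illustrative:

import time

from oslo_concurrency import lockutils

# The wrapped `inner` function is what logs the Acquiring/acquired/released
# lines seen in the log, including the waited/held durations.
synchronized = lockutils.synchronized_with_prefix('nova-')


@synchronized('ded4b760-f19d-4f7c-ae5e-1a4fee4dbc4f')
def do_terminate_instance():
    # Illustrative body: runs only after every other holder of this
    # instance-UUID lock (e.g. the build path) has released it.
    time.sleep(0.1)


do_terminate_instance()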
[ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server [None req-5e4daca7-64e5-4822-ab3a-7678549ea9ce tempest-ServerShowV247Test-425813680 tempest-ServerShowV247Test-425813680-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-635b8154-880a-44e6-ab11-4696923afc99'] [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1899.964085] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server 
raise self.value [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server raise self.value [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1899.965725] env[67008]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1899.967384] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1899.967384] env[67008]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1899.967384] env[67008]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1899.967384] env[67008]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1899.967384] env[67008]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1899.967384] env[67008]: ERROR oslo_messaging.rpc.server [ 1899.988988] env[67008]: DEBUG nova.policy [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4aaef0ae8e6f4b44bdade57ecabd89c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9c416d00db7419cb441a28d6db8771e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 1899.995104] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1900.016025] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:29Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=192,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1900.016287] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1900.016447] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1900.016627] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1900.016772] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1900.016917] env[67008]: DEBUG 
nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1900.017154] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1900.017332] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1900.017772] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1900.017772] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1900.017920] env[67008]: DEBUG nova.virt.hardware [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1900.018887] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56dc205-7598-4604-b9ea-9fe860373342 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.027435] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f0047c-c140-4fb3-b0cf-32a8395af13a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.274514] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Successfully created port: eee33ed6-eca2-4b66-8132-0b936412c5f6 {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.887549] env[67008]: DEBUG nova.compute.manager [req-49319b4d-85b3-44a4-a604-db4afa6e52b0 req-43ba21db-6c15-4429-9ca5-0aefb153b362 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Received event network-vif-plugged-eee33ed6-eca2-4b66-8132-0b936412c5f6 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1900.887781] env[67008]: DEBUG oslo_concurrency.lockutils [req-49319b4d-85b3-44a4-a604-db4afa6e52b0 
req-43ba21db-6c15-4429-9ca5-0aefb153b362 service nova] Acquiring lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1900.887992] env[67008]: DEBUG oslo_concurrency.lockutils [req-49319b4d-85b3-44a4-a604-db4afa6e52b0 req-43ba21db-6c15-4429-9ca5-0aefb153b362 service nova] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1900.888187] env[67008]: DEBUG oslo_concurrency.lockutils [req-49319b4d-85b3-44a4-a604-db4afa6e52b0 req-43ba21db-6c15-4429-9ca5-0aefb153b362 service nova] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1900.888354] env[67008]: DEBUG nova.compute.manager [req-49319b4d-85b3-44a4-a604-db4afa6e52b0 req-43ba21db-6c15-4429-9ca5-0aefb153b362 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] No waiting events found dispatching network-vif-plugged-eee33ed6-eca2-4b66-8132-0b936412c5f6 {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1900.888517] env[67008]: WARNING nova.compute.manager [req-49319b4d-85b3-44a4-a604-db4afa6e52b0 req-43ba21db-6c15-4429-9ca5-0aefb153b362 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Received unexpected event network-vif-plugged-eee33ed6-eca2-4b66-8132-0b936412c5f6 for instance with vm_state building and task_state spawning.
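[editor's note] The "No waiting events found" / "Received unexpected event" pair above is the external-event handshake: the compute manager registers a waiter before an operation that expects Neutron to report `network-vif-plugged`, and the event handler pops and signals it. A minimal re-implementation sketch (hypothetical names, not nova's actual InstanceEvents API) of why an event that arrives before anyone is waiting is merely logged as unexpected:

```python
# Minimal re-implementation sketch of the wait/pop event pattern; the class
# and method names are illustrative stand-ins, not nova's real API.
import threading

class InstanceEvents:
    def __init__(self):
        self._events = {}          # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, name):
        # Called before a long operation (e.g. spawning) that expects an
        # external service to confirm something, like a VIF being plugged.
        with self._lock:
            ev = threading.Event()
            self._events[(instance_uuid, name)] = ev
            return ev

    def pop_instance_event(self, instance_uuid, name):
        # Called from the external-event handler. If nobody prepared a
        # waiter, the event is "unexpected" -- the WARNING in the log.
        with self._lock:
            ev = self._events.pop((instance_uuid, name), None)
        if ev is None:
            print('Received unexpected event %s for %s' % (name, instance_uuid))
        else:
            ev.set()
```

Here the port was plugged while the instance was still building, so no waiter existed yet and the event was dropped with a WARNING rather than treated as an error.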
[ 1900.972654] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Successfully updated port: eee33ed6-eca2-4b66-8132-0b936412c5f6 {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1900.987865] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "refresh_cache-3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1900.988040] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "refresh_cache-3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1900.988197] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1901.024743] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1901.190985] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Updating instance_info_cache with network_info: [{"id": "eee33ed6-eca2-4b66-8132-0b936412c5f6", "address": "fa:16:3e:15:e8:61", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeee33ed6-ec", "ovs_interfaceid": "eee33ed6-eca2-4b66-8132-0b936412c5f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.205027] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "refresh_cache-3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1901.205027] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance network_info: |[{"id": "eee33ed6-eca2-4b66-8132-0b936412c5f6", "address": "fa:16:3e:15:e8:61", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeee33ed6-ec", "ovs_interfaceid": "eee33ed6-eca2-4b66-8132-0b936412c5f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1901.205239] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:e8:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eee33ed6-eca2-4b66-8132-0b936412c5f6', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.212584] env[67008]: DEBUG oslo.service.loopingcall [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.213133] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1901.213322] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07a85c10-a964-408a-bcb6-082ba20366c8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.234098] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.234098] env[67008]: value = "task-2825012" [ 1901.234098] env[67008]: _type = "Task" [ 1901.234098] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.242062] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825012, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.745026] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825012, 'name': CreateVM_Task, 'duration_secs': 0.322421} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.745206] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1901.745893] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1901.746079] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1901.746392] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1901.746644] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acf02948-70eb-4411-a8c6-8ae8eeade6dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.751136] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 1901.751136] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52056b9f-b35c-dc6d-343d-e9338a2d7f1a" [ 1901.751136] env[67008]: _type = "Task" [ 1901.751136] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.758655] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52056b9f-b35c-dc6d-343d-e9338a2d7f1a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.262288] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1902.262604] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.262749] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1902.915216] env[67008]: DEBUG nova.compute.manager [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Received event network-changed-eee33ed6-eca2-4b66-8132-0b936412c5f6 {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1902.915411] env[67008]: DEBUG nova.compute.manager [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Refreshing instance network info cache due to event network-changed-eee33ed6-eca2-4b66-8132-0b936412c5f6. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1902.915620] env[67008]: DEBUG oslo_concurrency.lockutils [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] Acquiring lock "refresh_cache-3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1902.915762] env[67008]: DEBUG oslo_concurrency.lockutils [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] Acquired lock "refresh_cache-3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1902.915934] env[67008]: DEBUG nova.network.neutron [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Refreshing network info cache for port eee33ed6-eca2-4b66-8132-0b936412c5f6 {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1903.152713] env[67008]: DEBUG nova.network.neutron [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Updated VIF entry in instance network info cache for port eee33ed6-eca2-4b66-8132-0b936412c5f6. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1903.153162] env[67008]: DEBUG nova.network.neutron [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Updating instance_info_cache with network_info: [{"id": "eee33ed6-eca2-4b66-8132-0b936412c5f6", "address": "fa:16:3e:15:e8:61", "network": {"id": "2f10120f-7c52-47f3-9b37-ad5a3ece60b6", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-239851881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9c416d00db7419cb441a28d6db8771e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeee33ed6-ec", "ovs_interfaceid": "eee33ed6-eca2-4b66-8132-0b936412c5f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.162506] env[67008]: DEBUG oslo_concurrency.lockutils [req-806a99a4-57e5-4b70-ac4d-d1c1daed1337 req-d0f9e907-0e13-4a7a-a2c0-3ede9f6fc136 service nova] Releasing lock "refresh_cache-3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1926.857039] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1930.857466] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1930.868746] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1930.868932] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1930.869112] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1930.869509] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1930.870407] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a100e72-7453-49f9-b51e-22d87834a756 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.880647] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f2e434-fbdf-4e99-ba0b-6ae9b7ab41d4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.895335] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e7f4b0-2776-4d76-b4f9-ed6eecae734a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.901575] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6beab0a-b597-4016-a5a7-cd8595ff9eb8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.930142] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181054MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1930.930279] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1930.930470] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1931.002979] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 0c45068a-d333-4247-841d-bf40ebb779da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003162] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003292] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003415] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003533] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003649] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003762] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003875] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.003981] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 64ed3874-13e7-495e-9676-1757f27a1256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.004108] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1931.004287] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1931.004419] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1931.114363] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6b6030-c444-437d-a8dc-f8b412af6b7f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.121971] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ab2066-cf55-439f-a590-5c1ca3c97e0a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.152423] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd97ed03-40cb-4585-9aa3-ae4ae96064e9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.159166] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681d3c24-fe53-4edd-a1d7-6e129264b7d1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.172679] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1931.181390] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1931.194562] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1931.194742] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1933.189208] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1933.189502] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1933.856618] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1933.856824] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1935.856878] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1935.857245] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1935.857245] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1935.880389] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1935.880389] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1937.857970] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1937.858366] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.227906] env[67008]: WARNING oslo_vmware.rw_handles [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1945.227906] env[67008]: ERROR oslo_vmware.rw_handles [ 1945.228687] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1945.230302] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1945.230573] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 
tempest-ServersAaction247Test-1213393996-project-member] Copying Virtual Disk [datastore1] vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/5247200d-8d42-4302-8c95-579d52e7f4b2/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1945.230871] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57b033b2-3ab3-4f57-9819-921b106dacec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.239457] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for the task: (returnval){ [ 1945.239457] env[67008]: value = "task-2825013" [ 1945.239457] env[67008]: _type = "Task" [ 1945.239457] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.246945] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Task: {'id': task-2825013, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.750195] env[67008]: DEBUG oslo_vmware.exceptions [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1945.750477] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1945.751028] env[67008]: ERROR nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1945.751028] env[67008]: Faults: ['InvalidArgument'] [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Traceback (most recent call last): [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] yield resources [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self.driver.spawn(context, instance, image_meta, [ 1945.751028] env[67008]: ERROR 
nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self._fetch_image_if_missing(context, vi) [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] image_cache(vi, tmp_image_ds_loc) [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] vm_util.copy_virtual_disk( [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] session._wait_for_task(vmdk_copy_task) [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] return self.wait_for_task(task_ref) [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] return evt.wait() [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] result = hub.switch() [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] return self.greenlet.switch() [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self.f(*self.args, **self.kw) [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 
0c45068a-d333-4247-841d-bf40ebb779da] raise exceptions.translate_fault(task_info.error) [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Faults: ['InvalidArgument'] [ 1945.751028] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] [ 1945.752225] env[67008]: INFO nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Terminating instance [ 1945.752893] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1945.753132] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1945.753378] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eef389c1-ed2b-41d2-b96d-da536768ffb3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.755636] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1945.755791] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquired lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1945.755982] env[67008]: DEBUG nova.network.neutron [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1945.762685] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1945.762841] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1945.764053] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0898efb4-d654-4df9-b289-ee077423a2b0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.771018] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1945.771018] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5221865f-b93d-26ec-4698-6d7059ff787b" [ 1945.771018] env[67008]: _type = "Task" [ 1945.771018] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.778858] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5221865f-b93d-26ec-4698-6d7059ff787b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.785411] env[67008]: DEBUG nova.network.neutron [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1945.845773] env[67008]: DEBUG nova.network.neutron [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.854039] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Releasing lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1945.854429] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1945.854620] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1945.855671] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e911048-36d0-4370-b112-21650433ba46 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.863289] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1945.863505] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-007b5e4d-2a1f-4640-9898-529b28c3d602 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.892448] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1945.892448] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1945.892448] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Deleting the datastore file [datastore1] 0c45068a-d333-4247-841d-bf40ebb779da {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1945.892752] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fed52ec-8940-411c-ab11-dc1145b10ad2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.898523] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for the task: (returnval){ [ 1945.898523] env[67008]: value = "task-2825015" [ 1945.898523] env[67008]: _type = "Task" [ 1945.898523] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.905891] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Task: {'id': task-2825015, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.283946] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1946.284302] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating directory with path [datastore1] vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1946.284463] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83830f63-be64-45e3-b416-5c02da53f594 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.296388] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Created directory with path [datastore1] vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1946.296603] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Fetch image to [datastore1] vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1946.296809] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1946.297604] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb7ff78-4264-43a9-9f08-b6d878bb6cbe {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.304463] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fc9206-baaf-4a4d-884a-7136f895a79e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.314156] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5854a45-94d1-47b7-b220-10001c6e8e46 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.347789] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db11d9d5-9b59-47bb-9a9e-a628b4be7dfa {{(pid=67008) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.353511] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fc06de0d-3026-49ea-a478-1bf062608ce1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.373409] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1946.408026] env[67008]: DEBUG oslo_vmware.api [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Task: {'id': task-2825015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046295} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.408279] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1946.408464] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1946.408633] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1946.408801] env[67008]: INFO nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Took 0.55 seconds to destroy the instance on the hypervisor. [ 1946.409044] env[67008]: DEBUG oslo.service.loopingcall [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1946.409255] env[67008]: DEBUG nova.compute.manager [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network deallocation for instance since networking was not requested. 
{{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1946.411309] env[67008]: DEBUG nova.compute.claims [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1946.411470] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1946.411681] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1946.523450] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1946.592215] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1946.592625] env[67008]: DEBUG oslo_vmware.rw_handles [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1946.624868] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f447e0c-1059-4993-8711-912310a2796f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.631944] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d481e8cf-0be0-4485-9146-18caacfd251a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.662394] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b64c2b-c6b4-4414-9a9c-bce0719b19ea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.669009] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2845309-bab1-4df1-b637-79c5b007e7c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.681622] env[67008]: DEBUG nova.compute.provider_tree [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1946.690766] env[67008]: DEBUG nova.scheduler.client.report [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1946.703648] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.292s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1946.704185] env[67008]: ERROR nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1946.704185] env[67008]: Faults: ['InvalidArgument'] [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Traceback (most recent call last): [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1946.704185] env[67008]: ERROR 
nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self.driver.spawn(context, instance, image_meta, [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self._fetch_image_if_missing(context, vi) [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] image_cache(vi, tmp_image_ds_loc) [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] vm_util.copy_virtual_disk( [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] session._wait_for_task(vmdk_copy_task) [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] return self.wait_for_task(task_ref) [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] return evt.wait() [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] result = hub.switch() [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] return self.greenlet.switch() [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] self.f(*self.args, **self.kw) [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] raise exceptions.translate_fault(task_info.error) [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Faults: ['InvalidArgument'] [ 1946.704185] env[67008]: ERROR nova.compute.manager [instance: 0c45068a-d333-4247-841d-bf40ebb779da] [ 1946.705284] env[67008]: DEBUG nova.compute.utils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1946.706227] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Build of instance 0c45068a-d333-4247-841d-bf40ebb779da was re-scheduled: A specified parameter was not correct: fileType [ 1946.706227] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1946.706625] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1946.706843] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1946.706988] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquired lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1946.707161] env[67008]: DEBUG nova.network.neutron [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1946.733629] env[67008]: DEBUG nova.network.neutron [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1946.797619] env[67008]: DEBUG nova.network.neutron [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.806502] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Releasing lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1946.806717] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1946.806895] env[67008]: DEBUG nova.compute.manager [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Skipping network deallocation for instance since networking was not requested. {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1946.903587] env[67008]: INFO nova.scheduler.client.report [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Deleted allocations for instance 0c45068a-d333-4247-841d-bf40ebb779da [ 1946.922073] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b97469d-f58a-4521-a34e-8feea5dd61d5 tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "0c45068a-d333-4247-841d-bf40ebb779da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 547.832s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1946.922334] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "0c45068a-d333-4247-841d-bf40ebb779da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 352.050s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1946.922561] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "0c45068a-d333-4247-841d-bf40ebb779da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1946.922766] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock 
"0c45068a-d333-4247-841d-bf40ebb779da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1946.922953] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "0c45068a-d333-4247-841d-bf40ebb779da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1946.924782] env[67008]: INFO nova.compute.manager [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Terminating instance [ 1946.926316] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquiring lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1946.926475] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Acquired lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1946.926641] env[67008]: DEBUG nova.network.neutron [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1946.952258] env[67008]: DEBUG nova.network.neutron [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1947.011296] env[67008]: DEBUG nova.network.neutron [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.021416] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Releasing lock "refresh_cache-0c45068a-d333-4247-841d-bf40ebb779da" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1947.021848] env[67008]: DEBUG nova.compute.manager [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1947.022057] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1947.022871] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d4ca3fb-9b4a-4808-9057-d72409445c40 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.031365] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5986923-e21c-4423-b5b5-b44818051e3b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.060936] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0c45068a-d333-4247-841d-bf40ebb779da could not be found. [ 1947.061095] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1947.062029] env[67008]: INFO nova.compute.manager [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1947.062029] env[67008]: DEBUG oslo.service.loopingcall [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1947.062029] env[67008]: DEBUG nova.compute.manager [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1947.062029] env[67008]: DEBUG nova.network.neutron [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1947.079133] env[67008]: DEBUG nova.network.neutron [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1947.086578] env[67008]: DEBUG nova.network.neutron [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.094909] env[67008]: INFO nova.compute.manager [-] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] Took 0.03 seconds to deallocate network for instance. [ 1947.179023] env[67008]: DEBUG oslo_concurrency.lockutils [None req-96fae4e4-8be9-489a-aa2a-f1bf8061f86a tempest-ServersAaction247Test-1213393996 tempest-ServersAaction247Test-1213393996-project-member] Lock "0c45068a-d333-4247-841d-bf40ebb779da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.256s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1947.179406] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "0c45068a-d333-4247-841d-bf40ebb779da" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 171.147s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1947.179596] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 0c45068a-d333-4247-841d-bf40ebb779da] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1947.179762] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "0c45068a-d333-4247-841d-bf40ebb779da" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1988.857681] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.853721] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.856398] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.868404] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1992.868614] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1992.868782] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1992.868934] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1992.870040] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bb8310-c894-49f2-9571-754f3d627039 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.878844] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d48dfe-92eb-4686-b0e6-4541ed8f8f9c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.892302] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84841bc9-ec3d-4319-9a56-d7cea5f2f04e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.898254] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0ad598-89ab-4795-a00f-c4f81155f612 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.927551] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181024MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1992.927679] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1992.927860] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1992.997133] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.997289] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.997415] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.997533] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.997651] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.997783] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.997912] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.998037] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 64ed3874-13e7-495e-9676-1757f27a1256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.998152] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1992.998327] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1992.998458] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1993.032565] env[67008]: WARNING oslo_vmware.rw_handles [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1993.032565] env[67008]: ERROR oslo_vmware.rw_handles [ 1993.032565] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1993.034748] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1993.034988] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Copying Virtual Disk [datastore1] vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/4139028f-772f-4f7b-a0d2-48cbc04386ca/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1993.035260] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e486fd9-4d59-4552-a6d1-e4ba40c6d584 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.045595] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1993.045595] env[67008]: value = "task-2825016" [ 1993.045595] env[67008]: _type = "Task" [ 1993.045595] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.055889] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': task-2825016, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.123581] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1e3ecf-c979-436e-b3bd-acb323e155b1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.131519] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3406b55-c554-49d4-8ea3-03de60306443 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.161293] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ae1085-680e-46f1-9901-9bbd2a24c0cb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.168383] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac93b900-0dc2-4ee8-994d-9e1dcd5ee30d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.182233] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1993.190969] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1993.206917] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1993.207142] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.279s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1993.555372] env[67008]: DEBUG oslo_vmware.exceptions [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1993.555681] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1993.556251] env[67008]: ERROR nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1993.556251] env[67008]: Faults: ['InvalidArgument'] [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Traceback (most recent call last): [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] yield resources [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self.driver.spawn(context, instance, image_meta, [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self._fetch_image_if_missing(context, vi) [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] image_cache(vi, tmp_image_ds_loc) [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] vm_util.copy_virtual_disk( [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] session._wait_for_task(vmdk_copy_task) [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] return self.wait_for_task(task_ref) [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] return evt.wait() [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] result = hub.switch() [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] return self.greenlet.switch() [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self.f(*self.args, **self.kw) [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] raise exceptions.translate_fault(task_info.error) [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Faults: ['InvalidArgument'] [ 1993.556251] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] [ 1993.557420] env[67008]: INFO nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Terminating instance [ 1993.558083] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1993.558288] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1993.558534] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb108e03-ef38-4c37-8089-3626ae559a55 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.560878] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1993.561080] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1993.561808] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490ef109-cc22-4300-bd78-bcb1a9bf92ea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.568494] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1993.568699] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a8209cb-e4fc-438a-b16e-3fa5f9f205a7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.570851] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1993.571056] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1993.571990] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eec8c24-fc06-40b6-b338-553b0a865e2a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.576773] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Waiting for the task: (returnval){ [ 1993.576773] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522b4d18-2aad-91ff-836f-79227bcfac59" [ 1993.576773] env[67008]: _type = "Task" [ 1993.576773] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.583474] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522b4d18-2aad-91ff-836f-79227bcfac59, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.642975] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1993.643167] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1993.643347] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Deleting the datastore file [datastore1] 7868f806-e64f-4964-9e1e-bcb8d29e685f {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1993.643651] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31a143f1-90cb-443e-ae5e-ce593870037f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.649612] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for the task: (returnval){ [ 1993.649612] env[67008]: value = "task-2825018" [ 1993.649612] env[67008]: _type = "Task" [ 1993.649612] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.657065] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': task-2825018, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.087581] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1994.087974] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Creating directory with path [datastore1] vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1994.088114] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89fa9223-d7a5-44cb-9d2b-036c1a642219 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.099045] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Created directory with path [datastore1] vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1994.099299] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Fetch image to [datastore1] vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1994.099417] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1994.100107] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0901c7d-9c6b-49f8-99bd-f5d0cc6c3bdc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.106435] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2486f4-db53-41ee-88b3-f880ce02f805 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.115006] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a96204-0318-48b2-b7c8-8d527dec18a9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.146546] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6918b6-c128-4c72-8426-42e08bf96d48 
{{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.154304] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-72b9babd-7558-475e-9765-d1ea6377c96e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.158911] env[67008]: DEBUG oslo_vmware.api [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Task: {'id': task-2825018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062987} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.159143] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1994.159314] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1994.159473] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1994.159636] env[67008]: INFO nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Took 0.60 seconds to destroy the instance on the hypervisor. 
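The task records above show oslo.vmware's standard poll-until-terminal loop: both CopyVirtualDisk_Task and DeleteDatastoreFile_Task are reported at "progress is 0%" and then re-polled until they either complete successfully (as task-2825018 does, duration_secs 0.062987) or end in a server-side fault that is translated into an exception (as task-2825016 does with InvalidArgument). A minimal stand-alone sketch of that pattern using only the Python standard library; fetch_task_info and TaskFailed are illustrative stand-ins, not the real oslo.vmware API:

    import time

    class TaskFailed(Exception):
        """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
        # Re-read the task state until it reaches a terminal value, mirroring
        # the "progress is 0%" ... "completed successfully" records above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 0}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # oslo.vmware raises a fault-specific exception at this point
                # when the server reports faults such as ['InvalidArgument'].
                raise TaskFailed(info.get('error', 'unknown fault'))
            time.sleep(poll_interval)
        raise TimeoutError(f'task {task_id} did not reach a terminal state in {timeout}s')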
[ 1994.161616] env[67008]: DEBUG nova.compute.claims [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1994.161830] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1994.162074] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1994.175115] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1994.207040] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.207210] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1994.224465] env[67008]: DEBUG oslo_vmware.rw_handles [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1994.287867] env[67008]: DEBUG oslo_vmware.rw_handles [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1994.288081] env[67008]: DEBUG oslo_vmware.rw_handles [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1994.362642] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4068b3a0-d09e-4a94-8bb4-ab429a5eef05 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.369732] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d299022-e2c6-4821-a4e9-603cedd12850 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.399202] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3c17c9-a447-4aea-addb-b0d09ba50ee5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.406118] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f75aeb8-1aad-440f-824b-5366f33fd2f4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.419966] env[67008]: DEBUG nova.compute.provider_tree [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1994.428597] env[67008]: DEBUG nova.scheduler.client.report [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1994.441721] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.280s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1994.442268] env[67008]: ERROR nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1994.442268] env[67008]: Faults: ['InvalidArgument'] [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Traceback (most recent call last): [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1994.442268] env[67008]: ERROR nova.compute.manager 
[instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self.driver.spawn(context, instance, image_meta, [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self._fetch_image_if_missing(context, vi) [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] image_cache(vi, tmp_image_ds_loc) [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] vm_util.copy_virtual_disk( [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] session._wait_for_task(vmdk_copy_task) [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] return self.wait_for_task(task_ref) [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] return evt.wait() [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] result = hub.switch() [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] return self.greenlet.switch() [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] self.f(*self.args, **self.kw) [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] raise exceptions.translate_fault(task_info.error) [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Faults: ['InvalidArgument'] [ 1994.442268] env[67008]: ERROR nova.compute.manager [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] [ 1994.443295] env[67008]: DEBUG nova.compute.utils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1994.444399] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Build of instance 7868f806-e64f-4964-9e1e-bcb8d29e685f was re-scheduled: A specified parameter was not correct: fileType [ 1994.444399] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1994.444772] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1994.444941] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1994.445128] env[67008]: DEBUG nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1994.445288] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1994.801006] env[67008]: DEBUG nova.network.neutron [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.814703] env[67008]: INFO nova.compute.manager [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Took 0.37 seconds to deallocate network for instance. [ 1994.857335] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.913204] env[67008]: INFO nova.scheduler.client.report [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Deleted allocations for instance 7868f806-e64f-4964-9e1e-bcb8d29e685f [ 1994.935781] env[67008]: DEBUG oslo_concurrency.lockutils [None req-1d3c30db-ec3f-41cc-a0eb-2eaeb7ee00a7 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 594.864s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1994.936081] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 399.548s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1994.936308] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Acquiring lock "7868f806-e64f-4964-9e1e-bcb8d29e685f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1994.936516] env[67008]: DEBUG oslo_concurrency.lockutils [None
req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1994.936708] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1994.940606] env[67008]: INFO nova.compute.manager [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Terminating instance [ 1994.942352] env[67008]: DEBUG nova.compute.manager [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1994.942540] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1994.944029] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24240dde-9247-4fa1-ac2c-116baf8d6df2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.952639] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f9cdc4-9bc4-4ddb-b54a-d4b87482c361 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.981050] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7868f806-e64f-4964-9e1e-bcb8d29e685f could not be found. [ 1994.981265] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1994.981439] env[67008]: INFO nova.compute.manager [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Took 0.04 seconds to destroy the instance on the hypervisor.
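This second terminate of 7868f806-e64f-4964-9e1e-bcb8d29e685f completes in 0.04 seconds because the earlier reschedule path already removed the backend VM: the driver logs InstanceNotFound only as a warning and still reports "Instance destroyed", keeping deletion idempotent. A schematic reduction of that guard; InstanceNotFound and the driver call here are stand-ins for Nova's actual types, not its real code:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_instance(driver, instance_uuid):
        # Mirrors the "Instance does not exist on backend ... Instance
        # destroyed" sequence above: a VM that is already gone leaves nothing
        # to tear down, so the destroy still counts as successful.
        try:
            driver.destroy(instance_uuid)
        except InstanceNotFound:
            pass  # already deleted elsewhere -- treat as success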
[ 1994.981686] env[67008]: DEBUG oslo.service.loopingcall [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1994.981907] env[67008]: DEBUG nova.compute.manager [-] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1994.982016] env[67008]: DEBUG nova.network.neutron [-] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1995.004187] env[67008]: DEBUG nova.network.neutron [-] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.011764] env[67008]: INFO nova.compute.manager [-] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] Took 0.03 seconds to deallocate network for instance. [ 1995.095059] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d127c0d1-4f1d-4bdf-951e-89b7073f08a6 tempest-AttachVolumeTestJSON-160458498 tempest-AttachVolumeTestJSON-160458498-project-member] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.159s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1995.096369] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 219.064s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1995.096622] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 7868f806-e64f-4964-9e1e-bcb8d29e685f] During sync_power_state the instance has a pending task (deleting). Skip.
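The sync_power_state skip that closes the records above follows one rule: if an instance carries any pending task_state (here 'deleting'), another code path owns it and the periodic sync must not act on a possibly stale power state. A schematic version of that guard; the Instance dataclass is a stand-in for Nova's instance object, not the real model:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Instance:
        uuid: str
        power_state: str
        task_state: Optional[str]  # e.g. 'deleting', or None when idle

    def sync_power_state(instance: Instance, driver_power_state: str) -> None:
        # Mirrors "During sync_power_state the instance has a pending task
        # (deleting). Skip." -- never race an in-flight operation.
        if instance.task_state is not None:
            return
        if instance.power_state != driver_power_state:
            instance.power_state = driver_power_state  # reconcile with the driver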
[ 1995.096849] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "7868f806-e64f-4964-9e1e-bcb8d29e685f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1995.859297] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.859489] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1995.859616] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1995.878307] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.878467] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.878621] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.878754] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.878876] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.878997] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.879295] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building.
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.879432] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1995.879554] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1997.856632] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1999.858887] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1999.859299] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.852429] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.720891] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2018.721134] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2018.730800] env[67008]: DEBUG nova.compute.manager [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Starting instance...
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2018.780102] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2018.780373] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2018.782277] env[67008]: INFO nova.compute.claims [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2018.932463] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d56e01-1c07-4df2-bc91-475bbf921f54 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.947605] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cd74df-1e14-460c-84db-bf73d4ae6484 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.979410] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353a0c14-04b7-4a2d-bf68-9779b4cb99c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.986889] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6386dc4-7775-4f93-92e4-e59cb496259a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.999737] env[67008]: DEBUG nova.compute.provider_tree [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2019.008581] env[67008]: DEBUG nova.scheduler.client.report [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2019.021771] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 
tempest-ServersTestJSON-301886130-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2019.022244] env[67008]: DEBUG nova.compute.manager [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2019.054555] env[67008]: DEBUG nova.compute.utils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2019.055886] env[67008]: DEBUG nova.compute.manager [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2019.056067] env[67008]: DEBUG nova.network.neutron [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2019.063533] env[67008]: DEBUG nova.compute.manager [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2019.122685] env[67008]: DEBUG nova.policy [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e97f4af320914c1d9256c28327a4a1c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ff81b1d640c340dca8bcf28c7c3a2f01', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 2019.127638] env[67008]: DEBUG nova.compute.manager [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2019.152464] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2019.152703] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2019.152860] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2019.153049] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2019.153201] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2019.153348] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2019.153567] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2019.153764] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2019.153938] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 
tempest-ServersTestJSON-301886130-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2019.154110] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2019.154282] env[67008]: DEBUG nova.virt.hardware [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2019.155120] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a4cd23-2350-495e-a67f-e05caad7f78a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.162700] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc024301-dd3e-4a56-83cb-5a4ea31056ea {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.397066] env[67008]: DEBUG nova.network.neutron [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Successfully created port: 44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2019.866786] env[67008]: DEBUG nova.compute.manager [req-a7dbfe12-8441-4ded-920b-92ba85936557 req-f526e70a-d9b4-441f-9324-4a3d387b5861 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Received event network-vif-plugged-44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2019.867080] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7dbfe12-8441-4ded-920b-92ba85936557 req-f526e70a-d9b4-441f-9324-4a3d387b5861 service nova] Acquiring lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2019.867212] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7dbfe12-8441-4ded-920b-92ba85936557 req-f526e70a-d9b4-441f-9324-4a3d387b5861 service nova] Lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2019.867375] env[67008]: DEBUG oslo_concurrency.lockutils [req-a7dbfe12-8441-4ded-920b-92ba85936557 req-f526e70a-d9b4-441f-9324-4a3d387b5861 service nova] Lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2019.867535] env[67008]: DEBUG nova.compute.manager [req-a7dbfe12-8441-4ded-920b-92ba85936557 req-f526e70a-d9b4-441f-9324-4a3d387b5861 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] No 
waiting events found dispatching network-vif-plugged-44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2019.867710] env[67008]: WARNING nova.compute.manager [req-a7dbfe12-8441-4ded-920b-92ba85936557 req-f526e70a-d9b4-441f-9324-4a3d387b5861 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Received unexpected event network-vif-plugged-44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd for instance with vm_state building and task_state spawning. [ 2019.941770] env[67008]: DEBUG nova.network.neutron [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Successfully updated port: 44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2019.953859] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "refresh_cache-bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2019.954061] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired lock "refresh_cache-bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2019.954372] env[67008]: DEBUG nova.network.neutron [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2019.993140] env[67008]: DEBUG nova.network.neutron [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2020.140128] env[67008]: DEBUG nova.network.neutron [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Updating instance_info_cache with network_info: [{"id": "44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd", "address": "fa:16:3e:5d:88:53", "network": {"id": "542bddd3-3fe8-429e-8c06-3a88230d19ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-298851449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff81b1d640c340dca8bcf28c7c3a2f01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f6ba8e-8b", "ovs_interfaceid": "44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.151308] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Releasing lock "refresh_cache-bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2020.151600] env[67008]: DEBUG nova.compute.manager [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Instance network_info: |[{"id": "44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd", "address": "fa:16:3e:5d:88:53", "network": {"id": "542bddd3-3fe8-429e-8c06-3a88230d19ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-298851449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff81b1d640c340dca8bcf28c7c3a2f01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f6ba8e-8b", "ovs_interfaceid": "44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2020.151986] env[67008]: DEBUG 
nova.virt.vmwareapi.vmops [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:88:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2020.159769] env[67008]: DEBUG oslo.service.loopingcall [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2020.160207] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2020.160575] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddc89bfc-98b0-440e-835e-99ba0540e8bc {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.180350] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2020.180350] env[67008]: value = "task-2825019" [ 2020.180350] env[67008]: _type = "Task" [ 2020.180350] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.187962] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825019, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.690512] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825019, 'name': CreateVM_Task, 'duration_secs': 0.272568} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.690690] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2020.691396] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2020.691559] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2020.691965] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2020.692228] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd09d469-af00-49a2-a472-1f4437581293 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.696393] env[67008]: DEBUG oslo_vmware.api [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for the task: (returnval){ [ 2020.696393] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ff0b1f-63cd-bf7b-6a4b-5d2de57573c0" [ 2020.696393] env[67008]: _type = "Task" [ 2020.696393] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.704157] env[67008]: DEBUG oslo_vmware.api [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52ff0b1f-63cd-bf7b-6a4b-5d2de57573c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.207134] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2021.207465] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2021.207573] env[67008]: DEBUG oslo_concurrency.lockutils [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2021.894916] env[67008]: DEBUG nova.compute.manager [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Received event network-changed-44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2021.895136] env[67008]: DEBUG nova.compute.manager [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Refreshing instance network info cache due to event network-changed-44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2021.895348] env[67008]: DEBUG oslo_concurrency.lockutils [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] Acquiring lock "refresh_cache-bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2021.895491] env[67008]: DEBUG oslo_concurrency.lockutils [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] Acquired lock "refresh_cache-bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2021.895679] env[67008]: DEBUG nova.network.neutron [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Refreshing network info cache for port 44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2022.121816] env[67008]: DEBUG nova.network.neutron [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Updated VIF entry in instance network info cache for port 44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2022.122182] env[67008]: DEBUG nova.network.neutron [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Updating instance_info_cache with network_info: [{"id": "44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd", "address": "fa:16:3e:5d:88:53", "network": {"id": "542bddd3-3fe8-429e-8c06-3a88230d19ae", "bridge": "br-int", "label": "tempest-ServersTestJSON-298851449-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ff81b1d640c340dca8bcf28c7c3a2f01", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f6ba8e-8b", "ovs_interfaceid": "44f6ba8e-8bb7-4aff-8f16-4db14f8fc4cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.131686] env[67008]: DEBUG oslo_concurrency.lockutils [req-1b991952-b0da-45fb-a629-658f111e7be2 req-e71f7c12-0a85-4b6f-9315-063a7dfe1a82 service nova] Releasing lock "refresh_cache-bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2036.422059] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquiring lock "029e6a15-2f1d-42bf-b5ef-286e82ba7c0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2036.422400] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Lock "029e6a15-2f1d-42bf-b5ef-286e82ba7c0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2036.433343] env[67008]: DEBUG nova.compute.manager [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2036.477910] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2036.478174] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2036.479507] env[67008]: INFO nova.compute.claims [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2036.633398] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5903afe-f334-43f4-80a2-c96a51b581fb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.641015] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f68993-059a-478a-bda7-4f4406801783 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.671048] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e95f50-5e36-4cc8-922e-fde2aa0eabcd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.678114] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3beaff39-48fb-42ac-bf27-aa6a038735f7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.692550] env[67008]: DEBUG nova.compute.provider_tree [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2036.702080] env[67008]: DEBUG nova.scheduler.client.report [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2036.714795] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 
tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.237s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2036.715261] env[67008]: DEBUG nova.compute.manager [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2036.746319] env[67008]: DEBUG nova.compute.utils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2036.747703] env[67008]: DEBUG nova.compute.manager [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Not allocating networking since 'none' was specified. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 2036.757788] env[67008]: DEBUG nova.compute.manager [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2036.814888] env[67008]: DEBUG nova.compute.manager [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2036.839662] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2036.839893] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2036.840084] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2036.840249] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2036.840391] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2036.840533] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2036.840732] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2036.840889] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2036.841066] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 
tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2036.841239] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2036.841409] env[67008]: DEBUG nova.virt.hardware [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2036.842252] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a633790e-a22d-46f5-bfc0-38b8e7772de7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.849760] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14205b57-2daa-48eb-aae4-75c00fd052c0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.863098] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Instance VIF info [] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2036.868559] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Creating folder: Project (186fc222a8614f999345ac122aaa6868). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2036.868801] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52d2a679-d132-40ac-99c5-3a35496b761a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.877735] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Created folder: Project (186fc222a8614f999345ac122aaa6868) in parent group-v567993. [ 2036.877911] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Creating folder: Instances. Parent ref: group-v568095. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2036.878119] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17fd5bf7-dce1-4a94-85bc-79ff3d05dfd7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.884863] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Created folder: Instances in parent group-v568095. 
[ 2036.885087] env[67008]: DEBUG oslo.service.loopingcall [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.885259] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2036.885436] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3342add-edcd-4b72-8196-8e8c9bcf2544 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.900331] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2036.900331] env[67008]: value = "task-2825022" [ 2036.900331] env[67008]: _type = "Task" [ 2036.900331] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.907011] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825022, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.410676] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825022, 'name': CreateVM_Task, 'duration_secs': 0.24376} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.410830] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2037.411537] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2037.411724] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2037.412112] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2037.412364] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee527cf3-e4d4-4a92-ad96-c6bc1408e6ab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.416448] env[67008]: DEBUG oslo_vmware.api [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Waiting for the task: (returnval){ [ 
2037.416448] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f37e6b-29ea-d202-8737-0d5dc22f08df" [ 2037.416448] env[67008]: _type = "Task" [ 2037.416448] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.423554] env[67008]: DEBUG oslo_vmware.api [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f37e6b-29ea-d202-8737-0d5dc22f08df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.927375] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2037.927668] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2037.927978] env[67008]: DEBUG oslo_concurrency.lockutils [None req-62c39394-c8ee-4c10-9d91-da42ad9f0c46 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2039.857616] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2041.612484] env[67008]: WARNING oslo_vmware.rw_handles [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles raise 
RemoteDisconnected("Remote end closed connection without" [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2041.612484] env[67008]: ERROR oslo_vmware.rw_handles [ 2041.612484] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2041.614708] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2041.615111] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Copying Virtual Disk [datastore1] vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/07af8660-2a8d-41bd-920d-b7abf827cbda/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2041.615440] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6a53860-7d14-4a12-926b-05f77b04da18 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.623107] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Waiting for the task: (returnval){ [ 2041.623107] env[67008]: value = "task-2825023" [ 2041.623107] env[67008]: _type = "Task" [ 2041.623107] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.631302] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Task: {'id': task-2825023, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.134993] env[67008]: DEBUG oslo_vmware.exceptions [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2042.135291] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2042.135852] env[67008]: ERROR nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2042.135852] env[67008]: Faults: ['InvalidArgument'] [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Traceback (most recent call last): [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] yield resources [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self.driver.spawn(context, instance, image_meta, [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self._fetch_image_if_missing(context, vi) [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] image_cache(vi, tmp_image_ds_loc) [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] vm_util.copy_virtual_disk( [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] session._wait_for_task(vmdk_copy_task) [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] return self.wait_for_task(task_ref) [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] return evt.wait() [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] result = hub.switch() [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] return self.greenlet.switch() [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self.f(*self.args, **self.kw) [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] raise exceptions.translate_fault(task_info.error) [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Faults: ['InvalidArgument'] [ 2042.135852] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] [ 2042.137181] env[67008]: INFO nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Terminating instance [ 2042.137639] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2042.137841] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2042.138091] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04ce87c7-6695-4961-9ded-831eb750d7cf {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.140219] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2042.140409] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2042.141161] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46db44a3-4cd8-4fe7-b9e7-ba6ed23eafe6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.148093] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2042.149276] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32e761c0-2faa-4bde-bb6c-99842a347b87 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.150624] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2042.150796] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2042.151464] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4ca34b9-4803-46dd-b942-59e7c3b326dd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.156666] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 2042.156666] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d980fb-8b63-b812-aa37-3a6a7668ed6c" [ 2042.156666] env[67008]: _type = "Task" [ 2042.156666] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.163651] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d980fb-8b63-b812-aa37-3a6a7668ed6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.213025] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2042.213272] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2042.213423] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Deleting the datastore file [datastore1] 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2042.213701] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1891351-6486-4951-bacf-cbdb2dae5b92 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.219379] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Waiting for the task: (returnval){ [ 2042.219379] env[67008]: value = "task-2825025" [ 2042.219379] env[67008]: _type = "Task" [ 2042.219379] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.226983] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Task: {'id': task-2825025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.666513] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2042.666861] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating directory with path [datastore1] vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2042.667052] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a777ce5-1ef1-4df5-b089-ce75e02ddca4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.678686] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Created directory with path [datastore1] vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2042.678939] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Fetch image to [datastore1] vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2042.679109] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2042.679760] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41883c9e-a5a6-4453-9fa1-e576d4d2ac19 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.685964] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a12ae10-8102-48ee-a7bb-e80dfe379b31 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.694732] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6289a0aa-3163-47a6-a72e-de83e33151bf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.727246] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38f13f4-87a8-4992-876d-0c1111e5987d {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.733969] env[67008]: DEBUG oslo_vmware.api [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Task: {'id': task-2825025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067107} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.735410] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2042.735604] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2042.735802] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2042.735984] env[67008]: INFO nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Took 0.60 seconds to destroy the instance on the hypervisor. 
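The destroy sequence above (unregister the VM, delete the datastore file, poll task-2825025 to completion) is the standard oslo.vmware task-polling pattern: the driver invokes a vSphere *_Task method, then wait_for_task() polls the returned Task managed object until it reports success, or raises the translated fault on error. A minimal sketch of the file-delete step, assuming an already-established VMwareAPISession (the endpoint, credentials, and the datacenter argument here are placeholders, not values from this deployment):

    from oslo_vmware import api

    # Placeholder endpoint/credentials; the real session in this log was
    # created against the vCenter during driver startup.
    session = api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # FileManager.DeleteDatastoreFile_Task returns a Task managed object;
    # wait_for_task() polls its info (the "progress is 0%" lines above)
    # and raises e.g. VimFaultException if info.state ends up as 'error'.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848',
        datacenter=None)  # nova passes the actual Datacenter moref here
    session.wait_for_task(task)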
[ 2042.737743] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ecb0ef6f-8086-42e6-9af1-e943c5b2caab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.739580] env[67008]: DEBUG nova.compute.claims [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2042.739750] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2042.739959] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2042.760699] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2042.809621] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2042.868794] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2042.868981] env[67008]: DEBUG oslo_vmware.rw_handles [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2042.958667] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddbe35c-40a3-4129-937c-a286fdadbb1c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.966337] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329f688f-5302-4bcb-a8aa-121d4e8590c0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.995311] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ee66e3-b3fd-412b-b969-f069346e2fa0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.001977] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d00ef31-413f-41cd-909d-e01f68b02dc9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.014462] env[67008]: DEBUG nova.compute.provider_tree [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2043.022414] env[67008]: DEBUG nova.scheduler.client.report [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2043.036811] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.297s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2043.037321] env[67008]: ERROR nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2043.037321] env[67008]: Faults: ['InvalidArgument'] [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Traceback (most recent call last): [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2043.037321] env[67008]: ERROR 
nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self.driver.spawn(context, instance, image_meta, [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self._fetch_image_if_missing(context, vi) [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] image_cache(vi, tmp_image_ds_loc) [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] vm_util.copy_virtual_disk( [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] session._wait_for_task(vmdk_copy_task) [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] return self.wait_for_task(task_ref) [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] return evt.wait() [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] result = hub.switch() [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] return self.greenlet.switch() [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] self.f(*self.args, **self.kw) [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] raise exceptions.translate_fault(task_info.error) [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Faults: ['InvalidArgument'] [ 2043.037321] env[67008]: ERROR nova.compute.manager [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] [ 2043.038379] env[67008]: DEBUG nova.compute.utils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2043.039603] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Build of instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 was re-scheduled: A specified parameter was not correct: fileType [ 2043.039603] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2043.039967] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2043.040151] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2043.040319] env[67008]: DEBUG nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2043.040479] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2043.541964] env[67008]: DEBUG nova.network.neutron [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.552813] env[67008]: INFO nova.compute.manager [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Took 0.51 seconds to deallocate network for instance. [ 2043.638390] env[67008]: INFO nova.scheduler.client.report [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Deleted allocations for instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 [ 2043.657643] env[67008]: DEBUG oslo_concurrency.lockutils [None req-00088ce7-c99f-4b1e-b2ca-6f6bbb5c1284 tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 607.463s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2043.657916] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 412.016s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2043.658164] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Acquiring lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2043.658378] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s 
{{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2043.658568] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2043.660541] env[67008]: INFO nova.compute.manager [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Terminating instance [ 2043.662233] env[67008]: DEBUG nova.compute.manager [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2043.662430] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2043.662890] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd7d1c89-34f0-4c76-8439-830acfbf6520 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.673020] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72a8b8c-5707-4826-af1f-bb0bb80bc3e6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.703779] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848 could not be found. [ 2043.704006] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2043.704196] env[67008]: INFO nova.compute.manager [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2043.704441] env[67008]: DEBUG oslo.service.loopingcall [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2043.704679] env[67008]: DEBUG nova.compute.manager [-] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2043.704779] env[67008]: DEBUG nova.network.neutron [-] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2043.732242] env[67008]: DEBUG nova.network.neutron [-] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.739854] env[67008]: INFO nova.compute.manager [-] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] Took 0.04 seconds to deallocate network for instance. [ 2043.824814] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d78115d6-adeb-4980-bdcf-945c083eb34b tempest-ServersNegativeTestJSON-26290777 tempest-ServersNegativeTestJSON-26290777-project-member] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.167s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2043.826141] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 267.793s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2043.826363] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 9a8af81a-fbbc-4f1f-9540-3a7fa9f56848] During sync_power_state the instance has a pending task (deleting). Skip. 
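The "Waiting for function ... _try_deallocate_network.<locals>._deallocate_network_with_retries to return." line above is oslo.service's looping-call machinery at work: Nova wraps the Neutron deallocation in a retry decorator so transient connection failures sleep and retry instead of failing the teardown. A rough, self-contained sketch of the pattern (the retry counts and the exception type are illustrative, not Nova's exact configuration):

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(ConnectionError,))
    def _deallocate_network_with_retries():
        # In Nova this calls deallocate_for_instance(); any exception
        # listed in `exceptions` triggers a sleep-and-retry while the
        # looping call logs that it is waiting for the function to return.
        print('deallocating network')

    _deallocate_network_with_retries()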
[ 2043.826544] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "9a8af81a-fbbc-4f1f-9540-3a7fa9f56848" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2050.864758] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.852878] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.856535] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.868517] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2052.868733] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2052.868934] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2052.869108] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2052.870362] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048c3d98-d74e-47ef-8bdf-0ab8cba45e26 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.879174] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd39202-aabf-4e9d-81e8-87c01318fb90 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.893092] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efac5baa-030c-469c-8252-7c5cb43a7980 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.899082] env[67008]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa0e243-bc38-4bef-b8da-e2fec9810954 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.929163] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181087MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2052.929313] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2052.929485] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2053.060939] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061141] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061278] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061404] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061523] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061688] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 64ed3874-13e7-495e-9676-1757f27a1256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061776] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061861] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.061974] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2053.062190] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2053.063020] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2053.168845] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788507d9-3e0c-4b4e-9974-124868883422 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.176439] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab3fc75-2d8f-4e33-a2dd-656bdcfb057a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.205605] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48988044-12ac-4696-bf55-ac5f9ef03421 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.212289] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61fee28-d55e-4875-8a53-ba75e463b7cd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
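For the audit above, placement derives the usable capacity of each resource class from the inventory record as (total - reserved) * allocation_ratio. A quick check against the inventory logged for provider ad100a41-192a-4a03-bdd9-0a78ce856705 (extra fields such as min_unit and step_size omitted for brevity):

    # Inventory values copied from the log records around this point.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- so the 9 allocated
    # vCPUs in the final resource view barely dent this node.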
[ 2053.224778] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2053.232975] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2053.245919] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2053.246127] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.317s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2055.246557] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.246914] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2056.858063] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.858063] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2056.858518] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2056.877814] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878018] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878103] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878232] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878352] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878471] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878588] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878703] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878816] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2056.878930] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2056.879426] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.857056] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.857459] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.857506] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances with incomplete migration {{(pid=67008) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2060.867652] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2060.867936] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.856875] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.857281] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2062.868397] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] There are 0 instances to clean {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2067.598108] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2092.900181] env[67008]: WARNING oslo_vmware.rw_handles [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2092.900181] env[67008]: ERROR oslo_vmware.rw_handles [ 2092.900888] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2092.902610] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2092.902849] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Copying Virtual Disk [datastore1] vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/c30cfb37-1986-4049-be5c-928c08b0bbe9/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2092.903153] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47149217-46c5-4628-afba-31072028f2e2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.911626] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 2092.911626] env[67008]: value = "task-2825026" [ 2092.911626] env[67008]: _type = "Task" [ 2092.911626] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.919259] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2825026, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.422325] env[67008]: DEBUG oslo_vmware.exceptions [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2093.422596] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2093.423145] env[67008]: ERROR nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2093.423145] env[67008]: Faults: ['InvalidArgument'] [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Traceback (most recent call last): [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] yield resources [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self.driver.spawn(context, instance, image_meta, [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self._fetch_image_if_missing(context, vi) [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] image_cache(vi, tmp_image_ds_loc) [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] vm_util.copy_virtual_disk( [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 
2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] session._wait_for_task(vmdk_copy_task) [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] return self.wait_for_task(task_ref) [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] return evt.wait() [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] result = hub.switch() [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] return self.greenlet.switch() [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self.f(*self.args, **self.kw) [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] raise exceptions.translate_fault(task_info.error) [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Faults: ['InvalidArgument'] [ 2093.423145] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] [ 2093.424329] env[67008]: INFO nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Terminating instance [ 2093.425050] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2093.425183] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
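The InvalidArgument/fileType fault above is raised while caching the sparse image: once the vmdk has been fetched into vmware_temp, Nova asks the VirtualDiskManager to copy it into the image cache, and it is this copy task that fails. A condensed sketch of that call path, modeled on the vm_util.copy_virtual_disk() frame in the traceback (session and dc_ref are assumed to be an oslo.vmware session and a Datacenter moref):

    def copy_virtual_disk(session, dc_ref, source, dest):
        # VirtualDiskManager.CopyVirtualDisk_Task copies source -> dest,
        # e.g. tmp-sparse.vmdk -> ae01aa56-...vmdk in this log.
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=source, sourceDatacenter=dc_ref,
            destName=dest, destDatacenter=dc_ref)
        # wait_for_task() re-raises the vSphere fault as VimFaultException
        # ("A specified parameter was not correct: fileType" above).
        session.wait_for_task(task)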
[ 2093.425401] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc482150-204a-4139-8256-4118185f08bd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.427521] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2093.427734] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2093.428452] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672fba36-e5c0-46c7-bb9a-39914077b512 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.435052] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2093.435261] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0220bf0-edf9-4575-86f2-148c32547d06 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.437344] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2093.437513] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2093.438437] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de82707a-a7f8-4fd5-891a-729cde7d3185 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.443202] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 2093.443202] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d20069-30a6-13ac-9ac5-b423ce0a3558" [ 2093.443202] env[67008]: _type = "Task" [ 2093.443202] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.450196] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52d20069-30a6-13ac-9ac5-b423ce0a3558, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.505980] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2093.506210] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2093.506394] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleting the datastore file [datastore1] 6ede77a1-aa76-4e9f-8beb-80131e7990da {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2093.506700] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-616be2f2-c580-4d32-a2fe-c3f7e131894e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.512760] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for the task: (returnval){ [ 2093.512760] env[67008]: value = "task-2825028" [ 2093.512760] env[67008]: _type = "Task" [ 2093.512760] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.521567] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2825028, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.953460] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2093.953773] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating directory with path [datastore1] vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2093.953991] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d5d5cec-f976-4418-a94f-9de21bcbe99a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.970577] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Created directory with path [datastore1] vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2093.970762] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Fetch image to [datastore1] vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2093.970928] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2093.971680] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26ab1b4-26d7-4e20-863e-a94a01c33d56 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.978115] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92cedc90-d954-4be8-b7a6-6b1e6dd55166 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.987119] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a320ed-49b9-457e-a17d-1de414bee46e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.020372] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-485fa26f-409c-49df-9900-49a91514d4ac {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.027120] env[67008]: DEBUG oslo_vmware.api [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Task: {'id': task-2825028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072723} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.028598] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2094.028789] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2094.028973] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2094.029150] env[67008]: INFO nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Took 0.60 seconds to destroy the instance on the hypervisor. 
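
The CopyVirtualDisk_Task failure earlier in this trace ends in oslo_vmware's _poll_task raising exceptions.translate_fault(task_info.error); the "Waiting for the task ... progress is 0%" records are the same polling loop on its happy path. A rough sketch of that contract in plain Python (illustrative only, not oslo.vmware's implementation; TaskFault and the poll interval are assumptions):

    import time

    POLL_INTERVAL = 0.5  # assumed seconds between polls


    class TaskFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""


    def wait_for_task(get_task_info, poll_interval=POLL_INTERVAL):
        # get_task_info: callable returning an object with .state, .result
        # and .error attributes, analogous to reading TaskInfo through the
        # PropertyCollector.RetrievePropertiesEx calls seen in this log.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # mirrors raise exceptions.translate_fault(task_info.error),
                # the step that surfaced "A specified parameter was not
                # correct: fileType" / Faults: ['InvalidArgument'] above
                raise TaskFault(info.error)
            # 'queued' / 'running': keep polling, as the "progress is 0%"
            # records do
            time.sleep(poll_interval)
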
[ 2094.031290] env[67008]: DEBUG nova.compute.claims [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2094.031462] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2094.031668] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2094.034148] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-54616dd7-22ca-429a-b919-3c9e7cc38a58 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.056054] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2094.106604] env[67008]: DEBUG oslo_vmware.rw_handles [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2094.165113] env[67008]: DEBUG oslo_vmware.rw_handles [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2094.165311] env[67008]: DEBUG oslo_vmware.rw_handles [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2094.236051] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61909369-7a21-45f1-8333-6621f259f388 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.243571] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a2938-8351-4d2c-9de1-329da28b90ae {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.273551] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1756e689-af9a-4a5a-a71b-8bc87f6b8c47 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.280290] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d0fe24-e9eb-407d-8baf-1c50885aca92 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.292794] env[67008]: DEBUG nova.compute.provider_tree [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2094.300982] env[67008]: DEBUG nova.scheduler.client.report [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2094.314200] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2094.314720] env[67008]: ERROR nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2094.314720] env[67008]: Faults: ['InvalidArgument'] [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Traceback (most recent call last): [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 
6ede77a1-aa76-4e9f-8beb-80131e7990da] self.driver.spawn(context, instance, image_meta, [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self._fetch_image_if_missing(context, vi) [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] image_cache(vi, tmp_image_ds_loc) [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] vm_util.copy_virtual_disk( [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] session._wait_for_task(vmdk_copy_task) [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] return self.wait_for_task(task_ref) [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] return evt.wait() [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] result = hub.switch() [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] return self.greenlet.switch() [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] self.f(*self.args, **self.kw) [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] raise exceptions.translate_fault(task_info.error) [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Faults: ['InvalidArgument'] [ 2094.314720] env[67008]: ERROR nova.compute.manager [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] [ 2094.315701] env[67008]: DEBUG nova.compute.utils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2094.316767] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Build of instance 6ede77a1-aa76-4e9f-8beb-80131e7990da was re-scheduled: A specified parameter was not correct: fileType [ 2094.316767] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2094.317144] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2094.317316] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2094.317484] env[67008]: DEBUG nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2094.317668] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2094.847973] env[67008]: DEBUG nova.network.neutron [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.860531] env[67008]: INFO nova.compute.manager [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Took 0.54 seconds to deallocate network for instance. [ 2094.956925] env[67008]: INFO nova.scheduler.client.report [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Deleted allocations for instance 6ede77a1-aa76-4e9f-8beb-80131e7990da [ 2094.982327] env[67008]: DEBUG oslo_concurrency.lockutils [None req-a1766739-ed74-4091-829a-46627327f1d2 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 617.763s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2094.982588] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 422.150s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2094.982816] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Acquiring lock "6ede77a1-aa76-4e9f-8beb-80131e7990da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2094.983031] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2094.983206] env[67008]:
DEBUG oslo_concurrency.lockutils [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2094.985100] env[67008]: INFO nova.compute.manager [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Terminating instance [ 2094.986757] env[67008]: DEBUG nova.compute.manager [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2094.986951] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2094.987422] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2362a428-1fd0-4810-a52c-1ae80a617686 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.996758] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f773bde0-c2a0-4707-b0ad-5c8779546f7a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.023975] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ede77a1-aa76-4e9f-8beb-80131e7990da could not be found. [ 2095.024187] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2095.024359] env[67008]: INFO nova.compute.manager [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2095.024602] env[67008]: DEBUG oslo.service.loopingcall [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2095.024883] env[67008]: DEBUG nova.compute.manager [-] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2095.025493] env[67008]: DEBUG nova.network.neutron [-] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2095.057868] env[67008]: DEBUG nova.network.neutron [-] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2095.064622] env[67008]: INFO nova.compute.manager [-] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] Took 0.04 seconds to deallocate network for instance. [ 2095.167399] env[67008]: DEBUG oslo_concurrency.lockutils [None req-aa5147e2-1203-4a48-81d5-744b232850f1 tempest-ImagesTestJSON-1740946510 tempest-ImagesTestJSON-1740946510-project-member] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.185s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2095.168615] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 319.136s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2095.168932] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 6ede77a1-aa76-4e9f-8beb-80131e7990da] During sync_power_state the instance has a pending task (deleting). Skip.
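
The "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" triples throughout this trace come from oslo.concurrency's lockutils wrapper, which times both the wait for and the hold of a named lock. A minimal stdlib-only sketch of the same pattern (the timed_lock helper and its log wording are illustrative, not the lockutils implementation):

    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _locks = {}
    _registry_guard = threading.Lock()


    @contextmanager
    def timed_lock(name):
        # one named lock per key, like lockutils' internal registry
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        LOG.debug('Lock "%s" acquired :: waited %.3fs', name, waited)
        acquired_at = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - acquired_at
            lock.release()
            LOG.debug('Lock "%s" released :: held %.3fs', name, held)

Used as with timed_lock("compute_resources"): ..., this would emit the same waited/held pair that brackets the resource-tracker operations here (waited 422.150s on the contended instance lock above, 0.000s on uncontended ones).
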
[ 2095.169131] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "6ede77a1-aa76-4e9f-8beb-80131e7990da" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2110.868264] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.858788] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.877212] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2112.877441] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2112.877611] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2112.877780] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2112.879508] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f5d445-51dc-40a1-ab15-cd85e3e5eb3d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.888166] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46df6949-dcff-4d2d-ad22-2dc096377494 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.902313] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63684baf-743b-45f0-b0cf-0def3f07edbb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.908463] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6599bbad-3628-4b73-a1ed-e3950117b4a3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.936842] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181054MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2112.937055] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2112.937229] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2113.022688] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.022846] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.022972] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.023111] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.023229] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 64ed3874-13e7-495e-9676-1757f27a1256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.023341] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.023454] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.023563] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2113.023735] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2113.023866] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1600MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2113.039707] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing inventories for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2113.052365] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Updating ProviderTree inventory for provider ad100a41-192a-4a03-bdd9-0a78ce856705 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2113.052541] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Updating inventory in ProviderTree for provider ad100a41-192a-4a03-bdd9-0a78ce856705 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2113.062350] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing aggregate associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, aggregates: None {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 
2113.078256] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Refreshing trait associations for resource provider ad100a41-192a-4a03-bdd9-0a78ce856705, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=67008) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2113.167746] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14b8b87-f842-421c-afc2-5d3fc5b0fa52 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.175252] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e153dd3-344b-462d-b4fd-96d787386bc3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.204156] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0cd362-bc25-4141-ac9a-36019ffb4443 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.210933] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0bb319-f44e-4cca-aeb5-88dc58405614 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.223696] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.232158] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2113.244856] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2113.245082] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.308s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2115.240131] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.240489] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None 
None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.240489] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2116.858152] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.857615] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.857967] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2118.857967] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2118.875705] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.875877] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876012] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876138] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876263] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876382] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876499] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876614] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2118.876730] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2118.877237] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.856188] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.856470] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.852901] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.641049] env[67008]: WARNING oslo_vmware.rw_handles [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 2141.641049] env[67008]: ERROR oslo_vmware.rw_handles [ 2141.641049] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2141.642639] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2141.642896] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Copying Virtual Disk [datastore1] vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/baf460cb-3039-4878-88b6-5b853d6d1980/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2141.643194] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93dade43-be18-47e3-824a-c2b46225292d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.650800] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 2141.650800] env[67008]: value = "task-2825029" [ 2141.650800] env[67008]: _type = "Task" [ 2141.650800] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.658379] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': task-2825029, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.161067] env[67008]: DEBUG oslo_vmware.exceptions [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2142.161346] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2142.161884] env[67008]: ERROR nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2142.161884] env[67008]: Faults: ['InvalidArgument'] [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Traceback (most recent call last): [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] yield resources [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self.driver.spawn(context, instance, image_meta, [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self._fetch_image_if_missing(context, vi) [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] image_cache(vi, tmp_image_ds_loc) [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] vm_util.copy_virtual_disk( [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] session._wait_for_task(vmdk_copy_task) [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] return self.wait_for_task(task_ref) [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] return evt.wait() [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] result = hub.switch() [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] return self.greenlet.switch() [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self.f(*self.args, **self.kw) [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] raise exceptions.translate_fault(task_info.error) [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Faults: ['InvalidArgument'] [ 2142.161884] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] [ 2142.162706] env[67008]: INFO nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Terminating instance [ 2142.163815] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2142.164029] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2142.164274] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1980f048-5a7c-488f-954a-6d84747d7d61 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.166436] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2142.166636] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2142.167356] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8063412-b5ee-4bfa-953b-625e139553ff {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.174431] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2142.174675] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54df787a-82e3-49b2-92ae-f85b06f0b73f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.176802] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2142.176966] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2142.177890] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4ee4545-3aad-4c9e-a7d3-5b6a3e3fb71f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.182350] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for the task: (returnval){ [ 2142.182350] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5285f39f-bd58-6646-9406-c865d04b02ba" [ 2142.182350] env[67008]: _type = "Task" [ 2142.182350] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.190107] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]5285f39f-bd58-6646-9406-c865d04b02ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.245310] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2142.245594] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2142.245708] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Deleting the datastore file [datastore1] fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2142.245967] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fb7bfaa-edf8-4044-ba47-acd13e67f418 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.251871] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for the task: (returnval){ [ 2142.251871] env[67008]: value = "task-2825031" [ 2142.251871] env[67008]: _type = "Task" [ 2142.251871] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.259336] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': task-2825031, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.693050] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2142.693332] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating directory with path [datastore1] vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2142.693523] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04d2ce5a-884a-4e35-a753-fa5aa3d8ebf8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.704949] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Created directory with path [datastore1] vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2142.705147] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Fetch image to [datastore1] vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2142.705340] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2142.706072] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbdb9f5-3766-4629-ac31-e945e64c3b81 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.712295] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9dbbab-e16b-44c6-9ca0-2c120369fbaf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.720929] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fe089b-f506-4a0f-89ca-6d8a503fba69 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.752111] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa1d06a-0304-48e1-8b34-2d034b838c09 {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.762584] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-af337daf-fcf6-42ab-8452-fed990f9d15c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.764218] env[67008]: DEBUG oslo_vmware.api [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Task: {'id': task-2825031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081208} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.764448] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2142.764626] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2142.764789] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2142.764960] env[67008]: INFO nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2142.767114] env[67008]: DEBUG nova.compute.claims [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2142.767281] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2142.767524] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2142.785664] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2142.837612] env[67008]: DEBUG oslo_vmware.rw_handles [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2142.896588] env[67008]: DEBUG oslo_vmware.rw_handles [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2142.896774] env[67008]: DEBUG oslo_vmware.rw_handles [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2142.971853] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0509209b-130f-4c38-b2d7-97b1b5ed15c9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.979206] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc144170-5f9b-4b69-9fba-d282b840d4a3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.008742] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda42b58-025d-4794-85b7-29c2a96536e4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.016025] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17085bb9-c5e8-4b9c-a562-8226180c5b7b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.028403] env[67008]: DEBUG nova.compute.provider_tree [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2143.037015] env[67008]: DEBUG nova.scheduler.client.report [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2143.050413] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.283s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2143.050936] env[67008]: ERROR nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2143.050936] env[67008]: Faults: ['InvalidArgument'] [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Traceback (most recent call last): [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2143.050936] 
env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self.driver.spawn(context, instance, image_meta, [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self._fetch_image_if_missing(context, vi) [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] image_cache(vi, tmp_image_ds_loc) [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] vm_util.copy_virtual_disk( [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] session._wait_for_task(vmdk_copy_task) [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] return self.wait_for_task(task_ref) [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] return evt.wait() [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] result = hub.switch() [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] return self.greenlet.switch() [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] self.f(*self.args, **self.kw) [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] raise exceptions.translate_fault(task_info.error) [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Faults: ['InvalidArgument'] [ 2143.050936] env[67008]: ERROR nova.compute.manager [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] [ 2143.051689] env[67008]: DEBUG nova.compute.utils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2143.053034] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Build of instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e was re-scheduled: A specified parameter was not correct: fileType [ 2143.053034] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2143.053412] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2143.053621] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2143.053778] env[67008]: DEBUG nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2143.053960] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2143.344272] env[67008]: DEBUG nova.network.neutron [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.366024] env[67008]: INFO nova.compute.manager [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Took 0.31 seconds to deallocate network for instance. [ 2143.459596] env[67008]: INFO nova.scheduler.client.report [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Deleted allocations for instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e [ 2143.480606] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9f6fb658-f666-4c02-8b00-9ece7ca0d56c tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 538.758s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2143.480870] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 367.448s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2143.481063] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2143.481233] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2143.481710] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 342.515s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2143.482113] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Acquiring lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2143.482204] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2143.482298] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2143.484413] env[67008]: INFO nova.compute.manager [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Terminating instance [ 2143.486217] env[67008]: DEBUG nova.compute.manager [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Start destroying the instance on the hypervisor. 
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2143.486407] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2143.486665] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f291c33-f5c3-4e82-8626-b459f80f5e8f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.496121] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453de55b-7983-404d-ad7b-f849cf7ade21 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.522914] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e could not be found. [ 2143.523183] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2143.523308] env[67008]: INFO nova.compute.manager [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2143.523555] env[67008]: DEBUG oslo.service.loopingcall [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2143.524045] env[67008]: DEBUG nova.compute.manager [-] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2143.524155] env[67008]: DEBUG nova.network.neutron [-] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2143.545254] env[67008]: DEBUG nova.network.neutron [-] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.552643] env[67008]: INFO nova.compute.manager [-] [instance: fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e] Took 0.03 seconds to deallocate network for instance. 
[ 2143.636928] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0b69809d-dadf-454b-b1d6-362f858efa73 tempest-AttachInterfacesTestJSON-692762473 tempest-AttachInterfacesTestJSON-692762473-project-member] Lock "fbf3b5da-e09e-41ae-8d4a-ec53ebe6861e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.155s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2171.856393] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2173.857521] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2173.868524] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2173.868736] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2173.868914] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2173.869083] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2173.870301] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7f626f-addd-4e8e-a063-c5b964577c1a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.879130] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f319af1-2a22-4946-8551-1ad0de662642 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.894448] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1889d972-0365-40e5-af7f-bbbba414ee4d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.900857] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba018644-e01f-4721-8d0e-1cd4c294d51f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.929780] env[67008]: DEBUG 
nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181076MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2173.929929] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2173.930166] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 64ed3874-13e7-495e-9676-1757f27a1256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993101] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2173.993314] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2173.993365] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2174.079712] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d54c21-598d-4f3c-b875-6de8df2d7f3d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.087264] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b5818c-a772-41ca-9ac8-f5fa90aefe6e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.117392] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105311fd-ca3a-4850-9fbf-3fb4370b8577 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.123927] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a2cbc0-ced1-4d0e-97e5-e4be0eaa46a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.136459] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2174.146025] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2174.160112] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2174.160298] 
env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.230s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2175.154506] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2176.856741] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2176.857113] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2177.857695] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.856826] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2180.858508] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2180.858785] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2180.858785] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2180.876940] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877112] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877249] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877375] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877497] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877652] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877779] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2180.877902] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2180.878387] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.857275] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.611736] env[67008]: WARNING oslo_vmware.rw_handles [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2191.611736] env[67008]: ERROR 
oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2191.611736] env[67008]: ERROR oslo_vmware.rw_handles [ 2191.612356] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2191.614184] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2191.614424] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Copying Virtual Disk [datastore1] vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/168a6e7d-0cb5-4ab4-b089-1cc9237a8547/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2191.614704] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49721d1b-b869-4b65-9462-6499bc03ed58 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.622620] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for the task: (returnval){ [ 2191.622620] env[67008]: value = "task-2825032" [ 2191.622620] env[67008]: _type = "Task" [ 2191.622620] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.630371] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': task-2825032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.134019] env[67008]: DEBUG oslo_vmware.exceptions [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2192.134325] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 2192.134898] env[67008]: ERROR nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2192.134898] env[67008]: Faults: ['InvalidArgument']
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Traceback (most recent call last):
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] yield resources
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self.driver.spawn(context, instance, image_meta,
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self._fetch_image_if_missing(context, vi)
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] image_cache(vi, tmp_image_ds_loc)
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] vm_util.copy_virtual_disk(
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] session._wait_for_task(vmdk_copy_task)
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] return self.wait_for_task(task_ref)
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] return evt.wait()
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] result = hub.switch()
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] return self.greenlet.switch()
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self.f(*self.args, **self.kw)
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] raise exceptions.translate_fault(task_info.error)
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Faults: ['InvalidArgument']
[ 2192.134898] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2]
[ 2192.135658] env[67008]: INFO nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Terminating instance
[ 2192.136794] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 2192.136986] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2192.137241] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c325d066-5ac9-4d13-805f-fb2c94d3ffc9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.139461] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}}
[ 2192.139656] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2192.140454] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4c43aa-1cb7-4348-bccb-2d94d934a6e6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.146994] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2192.147221] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-809a7255-045a-471f-9722-9246b8f1d6c8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.149308] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2192.149489] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2192.150407] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-903d533c-d34b-4c47-875a-cdd7fc8b5a49 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.154779] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Waiting for the task: (returnval){
[ 2192.154779] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]521d9b68-79b7-1d43-4c1a-9502fb7da7ad"
[ 2192.154779] env[67008]: _type = "Task"
[ 2192.154779] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2192.161810] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]521d9b68-79b7-1d43-4c1a-9502fb7da7ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2192.221066] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2192.221290] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2192.221469] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Deleting the datastore file [datastore1] 260ddcc7-b12c-46f9-9c98-df270b438cd2 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2192.221741] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a221ef6-e516-4eef-80d9-7c6bd8c4c2a2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.227274] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for the task: (returnval){
[ 2192.227274] env[67008]: value = "task-2825034"
[ 2192.227274] env[67008]: _type = "Task"
[ 2192.227274] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2192.234490] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': task-2825034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2192.665170] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2192.665492] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Creating directory with path [datastore1] vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2192.665660] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a313f72-3547-49c9-992c-c178e30e88b4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.676564] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Created directory with path [datastore1] vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2192.676745] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Fetch image to [datastore1] vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2192.676912] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2192.677611] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4007bb2c-2e54-4e44-888f-d8b75a7371a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.683673] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7088f52-29a0-495e-8cb0-91d79a9af05a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.693369] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8df0d29-f1fd-4c11-ab44-c0ca7d5a4fe1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.723008] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7035d8ae-2d03-4ec1-9d94-1075e3be570a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.730593] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-08fdfb50-9260-4bda-956a-3fd3ea609dd4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.736518] env[67008]: DEBUG oslo_vmware.api [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': task-2825034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.060908} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2192.736742] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2192.736916] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2192.737090] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2192.737263] env[67008]: INFO nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Took 0.60 seconds to destroy the instance on the hypervisor.
[ 2192.739306] env[67008]: DEBUG nova.compute.claims [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2192.739472] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2192.739683] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2192.756030] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2192.874859] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f0b5b9-f92c-48f1-9465-3e8d12edad49 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.883492] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b2dee1-d4f1-4f62-86ab-7b32b1340dd2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.915392] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b997e7ca-52be-4714-9b68-33c7a3eb60ad {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.921986] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb00ba9-9356-40fd-aa05-1f3b46518358 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2192.934963] env[67008]: DEBUG nova.compute.provider_tree [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2192.937110] env[67008]: DEBUG oslo_vmware.rw_handles [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2192.992932] env[67008]: DEBUG nova.scheduler.client.report [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2192.998162] env[67008]: DEBUG oslo_vmware.rw_handles [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2192.998162] env[67008]: DEBUG oslo_vmware.rw_handles [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2193.006503] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.267s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2193.007687] env[67008]: ERROR nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2193.007687] env[67008]: Faults: ['InvalidArgument']
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Traceback (most recent call last):
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self.driver.spawn(context, instance, image_meta,
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self._fetch_image_if_missing(context, vi)
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] image_cache(vi, tmp_image_ds_loc)
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] vm_util.copy_virtual_disk(
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] session._wait_for_task(vmdk_copy_task)
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] return self.wait_for_task(task_ref)
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] return evt.wait()
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] result = hub.switch()
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] return self.greenlet.switch()
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] self.f(*self.args, **self.kw)
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] raise exceptions.translate_fault(task_info.error)
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Faults: ['InvalidArgument']
[ 2193.007687] env[67008]: ERROR nova.compute.manager [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2]
[ 2193.008557] env[67008]: DEBUG nova.compute.utils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2193.009835] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Build of instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 was re-scheduled: A specified parameter was not correct: fileType
[ 2193.009835] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}}
[ 2193.010245] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}}
[ 2193.010481] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}}
[ 2193.010606] env[67008]: DEBUG nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}}
[ 2193.010776] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2193.315813] env[67008]: DEBUG nova.network.neutron [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2193.328862] env[67008]: INFO nova.compute.manager [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Took 0.32 seconds to deallocate network for instance.
[ 2193.420723] env[67008]: INFO nova.scheduler.client.report [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Deleted allocations for instance 260ddcc7-b12c-46f9-9c98-df270b438cd2
[ 2193.443508] env[67008]: DEBUG oslo_concurrency.lockutils [None req-0716afb2-1eee-42e1-8695-cada978faf51 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 567.934s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2193.444140] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 417.410s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2193.444140] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] During sync_power_state the instance has a pending task (spawning). Skip.
[ 2193.444420] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2193.444702] env[67008]: DEBUG oslo_concurrency.lockutils [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 371.489s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2193.444956] env[67008]: DEBUG oslo_concurrency.lockutils [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "260ddcc7-b12c-46f9-9c98-df270b438cd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2193.445196] env[67008]: DEBUG oslo_concurrency.lockutils [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2193.445380] env[67008]: DEBUG oslo_concurrency.lockutils [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2193.447290] env[67008]: INFO nova.compute.manager [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Terminating instance
[ 2193.449124] env[67008]: DEBUG nova.compute.manager [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}}
[ 2193.449376] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2193.449706] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11fb8740-dd20-41b7-b760-944f90b80408 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2193.459912] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e176f7e7-1635-4944-b034-d089b5f84d08 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2193.486111] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 260ddcc7-b12c-46f9-9c98-df270b438cd2 could not be found.
[ 2193.486322] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2193.486501] env[67008]: INFO nova.compute.manager [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2193.486737] env[67008]: DEBUG oslo.service.loopingcall [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2193.487225] env[67008]: DEBUG nova.compute.manager [-] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}}
[ 2193.487329] env[67008]: DEBUG nova.network.neutron [-] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2193.508636] env[67008]: DEBUG nova.network.neutron [-] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2193.517271] env[67008]: INFO nova.compute.manager [-] [instance: 260ddcc7-b12c-46f9-9c98-df270b438cd2] Took 0.03 seconds to deallocate network for instance.
[ 2193.598595] env[67008]: DEBUG oslo_concurrency.lockutils [None req-eb84e148-69c4-458a-8e6d-c56b8a15bcd9 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Lock "260ddcc7-b12c-46f9-9c98-df270b438cd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.154s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2214.935146] env[67008]: DEBUG oslo_concurrency.lockutils [None req-f3154c73-3adf-463b-8209-8aa0c41de633 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquiring lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2232.797843] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6f2794cd-9679-43fd-84be-71d89e3b4968 tempest-ServerShowV254Test-340302260 tempest-ServerShowV254Test-340302260-project-member] Acquiring lock "029e6a15-2f1d-42bf-b5ef-286e82ba7c0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2232.856789] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2233.857549] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2233.868790] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2233.869078] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2233.869256] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2233.869414] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2233.870857] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757b5120-23d2-436d-98e7-068db28221a9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.879245] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d99518-e520-4df3-8e90-3fc3d6126cc3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.893491] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8f3d5f-39be-453d-89c9-b1ff0ff8777f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.899289] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c671fd-9ee4-4e38-a821-d2fcefd52d65 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.928793] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181080MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2233.928963] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 2233.929168] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 2234.000289] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 5ecc1376-aab4-4b17-8746-39bed51edbba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2234.000445] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2234.000573] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 64ed3874-13e7-495e-9676-1757f27a1256 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2234.000696] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2234.000814] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2234.000933] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2234.001124] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2234.001267] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2234.072802] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80b94fd-77bf-4d1d-8379-d3a7fc025e7a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2234.080281] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1f6ed6-ca8f-4016-a963-914c0d41e1b0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2234.109262] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59ebfd5-f69a-4bf3-8be0-6fd0da522c13 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2234.116185] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2795ca-4b96-4c8e-8f5b-7e5f760981c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2234.129873] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2234.137898] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2234.153365] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2234.153543] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.224s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 2236.149271] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2238.857210] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2238.857573] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}}
[ 2239.858212] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2240.340858] env[67008]: WARNING oslo_vmware.rw_handles [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles response.begin()
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2240.340858] env[67008]: ERROR oslo_vmware.rw_handles
[ 2240.341431] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2240.343424] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2240.343680] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Copying Virtual Disk [datastore1] vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/6ebd54ae-a8f7-44dc-8955-f1b8eaac5f35/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2240.343968] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbb475e5-9df7-4950-a418-5a159bb717a8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2240.352483] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Waiting for the task: (returnval){
[ 2240.352483] env[67008]: value = "task-2825035"
[ 2240.352483] env[67008]: _type = "Task"
[ 2240.352483] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2240.359857] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Task: {'id': task-2825035, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2240.857466] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2240.863247] env[67008]: DEBUG oslo_vmware.exceptions [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2240.863543] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}}
[ 2240.864021] env[67008]: ERROR nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2240.864021] env[67008]: Faults: ['InvalidArgument']
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Traceback (most recent call last):
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] yield resources
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self.driver.spawn(context, instance, image_meta,
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self._fetch_image_if_missing(context, vi)
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] image_cache(vi, tmp_image_ds_loc)
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] vm_util.copy_virtual_disk(
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] session._wait_for_task(vmdk_copy_task)
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] return self.wait_for_task(task_ref)
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] return evt.wait()
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] result = hub.switch()
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] return self.greenlet.switch()
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self.f(*self.args, **self.kw)
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] raise exceptions.translate_fault(task_info.error)
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Faults: ['InvalidArgument']
[ 2240.864021] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba]
[ 2240.864761] env[67008]: INFO nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Terminating instance
[ 2240.865834] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}}
[ 2240.866050] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2240.866285] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbcaad94-724f-4744-8254-1abfcc9ec56f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2240.868344] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}}
[ 2240.868533] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2240.869270] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab8e7bb-7a35-4986-9b78-a5446366172a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2240.875866] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2240.876098] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1570529-19d2-4b59-b6c8-8a9dec5ad2b7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2240.878048] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2240.878228] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2240.879146] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294da11a-c199-4c86-a309-ab07e3cb6acf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2240.883541] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){
[ 2240.883541] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]524d4959-8082-6ca4-d4ba-2f1eb032b85b"
[ 2240.883541] env[67008]: _type = "Task"
[ 2240.883541] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2240.890417] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]524d4959-8082-6ca4-d4ba-2f1eb032b85b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2240.939456] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2240.939655] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2240.939832] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Deleting the datastore file [datastore1] 5ecc1376-aab4-4b17-8746-39bed51edbba {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2240.940112] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-492fa9a0-8d77-4b4c-bfe3-5fbff20dd16b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2240.945939] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Waiting for the task: (returnval){
[ 2240.945939] env[67008]: value = "task-2825037"
[ 2240.945939] env[67008]: _type = "Task"
[ 2240.945939] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2240.953363] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Task: {'id': task-2825037, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2241.393550] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2241.393662] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating directory with path [datastore1] vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2241.393832] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16699558-f5fd-4765-99c5-fb5da42b51c3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2241.404305] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created directory with path [datastore1] vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2241.404485] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Fetch image to [datastore1] vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2241.404677] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2241.405354] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357aa779-604e-4e18-bd7d-76faaa851849 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2241.411560] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22581497-8922-4fba-8161-faedac0f874f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2241.420065] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5859127-db53-45fd-a9f7-d6e3e849070f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2241.453234] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with
opID=oslo.vmware-04de7070-cc2c-4e90-b1b0-34b98ef7c444 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.459633] env[67008]: DEBUG oslo_vmware.api [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Task: {'id': task-2825037, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078086} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.461073] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2241.461270] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2241.461443] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2241.461620] env[67008]: INFO nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Took 0.59 seconds to destroy the instance on the hypervisor. 
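The "Waiting for the task: (returnval){ ... } to complete", "progress is 0%", and "completed successfully" entries above come from oslo.vmware's task polling: each vCenter invocation such as FileManager.DeleteDatastoreFile_Task returns a Task managed object whose info is polled until it reaches a terminal state. Below is a minimal sketch of that loop, assuming a get_task_info accessor; both it and TaskFailed are illustrative stand-ins, not the actual oslo.vmware internals.

    import time

    class TaskFailed(Exception):
        """A vCenter task ended in the 'error' state (illustrative)."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll the task's info until vCenter reports a terminal state,
        # much like the _poll_task "progress is 0%" entries above.
        while True:
            info = get_task_info(task_ref)  # hypothetical accessor returning a dict
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # oslo.vmware raises a translated VimFaultException here.
                raise TaskFailed(info.get('error'))
            time.sleep(interval)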
[ 2241.463338] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0e59118f-d9eb-4a32-a865-3a8efe25a7a6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.465170] env[67008]: DEBUG nova.compute.claims [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2241.465343] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2241.465552] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2241.489338] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2241.565919] env[67008]: DEBUG oslo_vmware.rw_handles [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2241.625285] env[67008]: DEBUG oslo_vmware.rw_handles [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2241.625476] env[67008]: DEBUG oslo_vmware.rw_handles [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2241.634354] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdc4a38-1c14-4501-8215-23196cf27f84 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.641315] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0388f9d-2c3b-426f-9542-ec3445cb8445 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.670253] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce4900e-06ee-4025-a12e-618f51cdaf92 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.676711] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130f4af2-95d3-465c-a8fe-ed242c12ecb8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.689519] env[67008]: DEBUG nova.compute.provider_tree [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2241.697666] env[67008]: DEBUG nova.scheduler.client.report [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2241.710847] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.245s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2241.711326] env[67008]: ERROR nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2241.711326] env[67008]: Faults: ['InvalidArgument'] [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Traceback (most recent call last): [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2241.711326] env[67008]: ERROR 
nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self.driver.spawn(context, instance, image_meta, [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self._fetch_image_if_missing(context, vi) [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] image_cache(vi, tmp_image_ds_loc) [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] vm_util.copy_virtual_disk( [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] session._wait_for_task(vmdk_copy_task) [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] return self.wait_for_task(task_ref) [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] return evt.wait() [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] result = hub.switch() [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] return self.greenlet.switch() [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] self.f(*self.args, **self.kw) [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] raise exceptions.translate_fault(task_info.error) [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Faults: ['InvalidArgument'] [ 2241.711326] env[67008]: ERROR nova.compute.manager [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] [ 2241.712074] env[67008]: DEBUG nova.compute.utils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2241.713352] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Build of instance 5ecc1376-aab4-4b17-8746-39bed51edbba was re-scheduled: A specified parameter was not correct: fileType [ 2241.713352] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2241.713716] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2241.713887] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2241.714067] env[67008]: DEBUG nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2241.714237] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2241.856458] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2241.856631] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2241.856755] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2241.872441] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2241.872715] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2241.872715] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2241.872836] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2241.872959] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2241.873096] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2241.873525] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2242.001369] env[67008]: DEBUG nova.network.neutron [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2242.013391] env[67008]: INFO nova.compute.manager [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Took 0.30 seconds to deallocate network for instance. [ 2242.105112] env[67008]: INFO nova.scheduler.client.report [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Deleted allocations for instance 5ecc1376-aab4-4b17-8746-39bed51edbba [ 2242.125839] env[67008]: DEBUG oslo_concurrency.lockutils [None req-d3734e9b-a8ce-4cff-98a2-ef2502c00bad tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 598.804s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.126110] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 466.093s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.126304] env[67008]: INFO nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] During sync_power_state the instance has a pending task (spawning). Skip.
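The lockutils entries above ('Acquiring lock ... by ...', 'acquired ... :: waited', '"released" ... :: held') are emitted by oslo.concurrency's synchronized decorator, which serializes entry to a named critical section such as "compute_resources" and logs wait/hold timings. A minimal sketch of code that produces the same trace; the function body here is illustrative, not Nova's actual resource-tracker logic.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Only one caller at a time enters this body; the decorator's
        # "inner" wrapper emits the Acquiring/acquired/"released" lines
        # with the waited/held timings seen in the entries above.
        pass

    abort_instance_claim()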
[ 2242.126477] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.126696] env[67008]: DEBUG oslo_concurrency.lockutils [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 403.841s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.126899] env[67008]: DEBUG oslo_concurrency.lockutils [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Acquiring lock "5ecc1376-aab4-4b17-8746-39bed51edbba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2242.127117] env[67008]: DEBUG oslo_concurrency.lockutils [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2242.127285] env[67008]: DEBUG oslo_concurrency.lockutils [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.129841] env[67008]: INFO nova.compute.manager [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Terminating instance [ 2242.131696] env[67008]: DEBUG nova.compute.manager [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Start destroying the instance on the hypervisor.
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2242.131890] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2242.132371] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca9c43bf-0154-4997-b930-6f61166f258f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.141159] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f53d4e-dc79-4cdc-b580-d374f28b2d16 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.166295] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5ecc1376-aab4-4b17-8746-39bed51edbba could not be found. [ 2242.166508] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2242.166684] env[67008]: INFO nova.compute.manager [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2242.166918] env[67008]: DEBUG oslo.service.loopingcall [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2242.167153] env[67008]: DEBUG nova.compute.manager [-] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2242.167250] env[67008]: DEBUG nova.network.neutron [-] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2242.189638] env[67008]: DEBUG nova.network.neutron [-] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2242.197309] env[67008]: INFO nova.compute.manager [-] [instance: 5ecc1376-aab4-4b17-8746-39bed51edbba] Took 0.03 seconds to deallocate network for instance.
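The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return." entry above reflects oslo.service's looping-call helper: a function is re-invoked on a fixed interval until it raises LoopingCallDone, and the caller blocks on the returned event. A hedged sketch of the pattern follows; the function body is a stand-in for Nova's real retry logic, not a copy of it.

    from oslo_service import loopingcall

    def _deallocate_network_with_retries():
        # The real body attempts neutron deallocation and simply returns on a
        # retryable failure, which lets the looping call fire again; raising
        # LoopingCallDone stops the loop and unblocks the waiter below.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    timer.start(interval=1.0).wait()  # blocks until LoopingCallDone is raised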
[ 2242.279676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-36a18f30-b092-47f7-b5ce-a2cb970e1c10 tempest-DeleteServersTestJSON-1133260470 tempest-DeleteServersTestJSON-1133260470-project-member] Lock "5ecc1376-aab4-4b17-8746-39bed51edbba" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.153s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2242.856447] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2248.854061] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.329699] env[67008]: DEBUG oslo_concurrency.lockutils [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "64ed3874-13e7-495e-9676-1757f27a1256" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2286.694432] env[67008]: WARNING oslo_vmware.rw_handles [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2286.694432] env[67008]: ERROR oslo_vmware.rw_handles [ 2286.695220] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2286.696907] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2286.697198] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Copying Virtual Disk [datastore1] vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/327a107e-29c0-4929-8fe5-2ee0bdd1e81c/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2286.697480] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cbb44d2-e2b3-4e16-bd9d-aee6608d9403 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.706870] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2286.706870] env[67008]: value = "task-2825038" [ 2286.706870] env[67008]: _type = "Task" [ 2286.706870] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.714156] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.217431] env[67008]: DEBUG oslo_vmware.exceptions [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2287.217763] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2287.218342] env[67008]: ERROR nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2287.218342] env[67008]: Faults: ['InvalidArgument'] [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Traceback (most recent call last): [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] yield resources [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self.driver.spawn(context, instance, image_meta, [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self._fetch_image_if_missing(context, vi) [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] image_cache(vi, tmp_image_ds_loc) [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] vm_util.copy_virtual_disk( [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] session._wait_for_task(vmdk_copy_task) [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] return self.wait_for_task(task_ref) [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] return evt.wait() [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] result = hub.switch() [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] return self.greenlet.switch() [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self.f(*self.args, **self.kw) [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] raise exceptions.translate_fault(task_info.error) [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Faults: ['InvalidArgument'] [ 2287.218342] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] [ 2287.219306] env[67008]: INFO nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Terminating instance [ 2287.220478] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2287.220478] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2287.220808] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ce8b96fb-803a-4636-970c-62c2aa58eba7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.222909] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2287.223117] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2287.223909] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93bf461c-7d69-4e65-b4a8-f2d7a81af30f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.230342] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2287.230568] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf24a567-a4c7-4096-b5ac-dc74c931cbce {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.232552] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2287.232723] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2287.233651] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76893971-0d46-4a54-a9ec-1caf844c7424 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.238786] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2287.238786] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e3fb3f-c8d5-a23f-033d-58988dc4afe5" [ 2287.238786] env[67008]: _type = "Task" [ 2287.238786] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.252828] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2287.253132] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating directory with path [datastore1] vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2287.253349] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c84fc2fd-c74f-45ab-b7f1-7f3acf7f50cd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.273051] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created directory with path [datastore1] vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2287.273251] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Fetch image to [datastore1] vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2287.273430] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2287.274160] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b797362-dd25-44fa-8876-a78fd04dafe6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.280635] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e461404-9b3d-4a78-9278-6669868fc6c4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.289512] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2751d554-9e5d-4164-a1ef-26549d4d3f59 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.321589] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6cd1e278-b165-46f3-abdf-eb52626b693c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.324072] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2287.324269] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2287.324436] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleting the datastore file [datastore1] 64ed3874-13e7-495e-9676-1757f27a1256 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2287.324658] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28fbce91-cb2c-4270-8fee-c5eb882c32d7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.329193] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7c5f84f5-277a-4dca-a749-9c745f2730fd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.332153] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2287.332153] env[67008]: value = "task-2825040" [ 2287.332153] env[67008]: _type = "Task" [ 2287.332153] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.339150] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.354049] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2287.403496] env[67008]: DEBUG oslo_vmware.rw_handles [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2287.462217] env[67008]: DEBUG oslo_vmware.rw_handles [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2287.462416] env[67008]: DEBUG oslo_vmware.rw_handles [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2287.842290] env[67008]: DEBUG oslo_vmware.api [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068032} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.842617] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2287.842719] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2287.842867] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2287.843054] env[67008]: INFO nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Took 0.62 seconds to destroy the instance on the hypervisor. [ 2287.845084] env[67008]: DEBUG nova.compute.claims [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2287.845258] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2287.845466] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2287.958045] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b4e376-8730-4f1b-87df-b09622bbb5f3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.965318] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6700cf18-93fb-42da-b7b8-94eb35f56d99 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.994852] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27c3276-aeb2-4103-854c-7e7101fbed38 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.002386] env[67008]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f35e7d5-51fa-4a00-8325-18196cffdcbe {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.016342] env[67008]: DEBUG nova.compute.provider_tree [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2288.026271] env[67008]: DEBUG nova.scheduler.client.report [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2288.039951] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.194s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2288.040506] env[67008]: ERROR nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2288.040506] env[67008]: Faults: ['InvalidArgument'] [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Traceback (most recent call last): [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self.driver.spawn(context, instance, image_meta, [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self._fetch_image_if_missing(context, vi) [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] image_cache(vi, tmp_image_ds_loc) [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] vm_util.copy_virtual_disk( [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] session._wait_for_task(vmdk_copy_task) [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] return self.wait_for_task(task_ref) [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] return evt.wait() [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] result = hub.switch() [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] return self.greenlet.switch() [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] self.f(*self.args, **self.kw) [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] raise exceptions.translate_fault(task_info.error) [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Faults: ['InvalidArgument'] [ 2288.040506] env[67008]: ERROR nova.compute.manager [instance: 64ed3874-13e7-495e-9676-1757f27a1256] [ 2288.041279] env[67008]: DEBUG nova.compute.utils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] VimFaultException {{(pid=67008) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2288.042590] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Build of instance 64ed3874-13e7-495e-9676-1757f27a1256 was re-scheduled: A specified parameter was not correct: fileType [ 2288.042590] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2288.042960] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2288.043150] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2288.043318] env[67008]: DEBUG nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2288.043477] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2288.344244] env[67008]: DEBUG nova.network.neutron [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2288.358442] env[67008]: INFO nova.compute.manager [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Took 0.31 seconds to deallocate network for instance.
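[Editor's note: the re-schedule above is triggered by the translated fault at the end of the preceding traceback. oslo_vmware polls the CopyVirtualDisk task until vCenter reports an error state and then raises it as a VimFaultException. A minimal sketch of that polling pattern; the names and TaskInfo attributes below are illustrative stand-ins, not the actual oslo_vmware internals.]

    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException (sketch only).
        def __init__(self, fault_list, msg):
            super().__init__(msg)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll the vCenter task the way the "Task: {...} progress is N%."
        # lines above show. get_task_info is a caller-supplied (hypothetical)
        # helper that fetches the TaskInfo object for task_ref.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # Mirrors _poll_task raising exceptions.translate_fault(...),
                # which is how faults like "InvalidArgument: fileType" surface.
                raise VimFaultException(info.error_faults, info.error_message)
            time.sleep(interval)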
[ 2288.448511] env[67008]: INFO nova.scheduler.client.report [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleted allocations for instance 64ed3874-13e7-495e-9676-1757f27a1256 [ 2288.467136] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ea69ff2c-fd34-4b40-808f-79d5eb3d828c tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "64ed3874-13e7-495e-9676-1757f27a1256" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 416.812s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2288.467416] env[67008]: DEBUG oslo_concurrency.lockutils [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "64ed3874-13e7-495e-9676-1757f27a1256" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 24.138s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2288.467775] env[67008]: DEBUG oslo_concurrency.lockutils [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "64ed3874-13e7-495e-9676-1757f27a1256-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2288.467887] env[67008]: DEBUG oslo_concurrency.lockutils [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "64ed3874-13e7-495e-9676-1757f27a1256-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2288.468129] env[67008]: DEBUG oslo_concurrency.lockutils [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "64ed3874-13e7-495e-9676-1757f27a1256-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2288.470033] env[67008]: INFO nova.compute.manager [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Terminating instance [ 2288.471791] env[67008]: DEBUG nova.compute.manager [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Start destroying the instance on the hypervisor.
{{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2288.471989] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2288.472484] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-608e8015-1d6f-471d-91bb-5727bf010111 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.481604] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c2fb39-32e8-48b4-be13-e8150e4b36d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.507204] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 64ed3874-13e7-495e-9676-1757f27a1256 could not be found. [ 2288.507409] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2288.507585] env[67008]: INFO nova.compute.manager [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2288.507820] env[67008]: DEBUG oslo.service.loopingcall [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2288.508052] env[67008]: DEBUG nova.compute.manager [-] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2288.508154] env[67008]: DEBUG nova.network.neutron [-] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2288.532163] env[67008]: DEBUG nova.network.neutron [-] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2288.539817] env[67008]: INFO nova.compute.manager [-] [instance: 64ed3874-13e7-495e-9676-1757f27a1256] Took 0.03 seconds to deallocate network for instance.
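[Editor's note: the paired 'acquired ... :: waited' and '"released" ... :: held' messages around this teardown come from oslo.concurrency's lock wrapper timing each critical section. A simplified sketch of that instrumentation, assuming a process-local lock registry; this is not the actual lockutils code.]

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}  # name -> threading.Lock; assumed process-local registry

    @contextmanager
    def timed_lock(name, caller):
        # Emit lockutils-style timing lines: how long we waited to acquire
        # the named lock, and how long we held it before releasing.
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: '
              f'waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - t1:.3f}s')

    # Example use, mirroring the resource tracker lines in this log:
    # with timed_lock("compute_resources",
    #                 "nova.compute.resource_tracker.ResourceTracker"
    #                 ".abort_instance_claim"):
    #     pass  # critical section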
[ 2288.625778] env[67008]: DEBUG oslo_concurrency.lockutils [None req-db9f2d95-a067-4264-b2bf-ba65f9bf2678 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "64ed3874-13e7-495e-9676-1757f27a1256" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.158s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2288.827518] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "54e33842-40a5-48e5-8813-f2da4f9fc152" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2292.856545] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2294.856963] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2294.868927] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2294.869165] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2294.869330] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2294.869487] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2294.870627] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b678b24-5aa1-462c-9ea7-31c75ac8758b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.879280] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7e6c9b-b2bd-4b91-8809-d04b9fc9d5d8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.892764] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-09d22359-fd4c-442f-b391-fa06a13599c6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.898704] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f12402-3af5-44b2-9a15-3044f6041385 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.926792] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181078MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2294.926930] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2294.927131] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2294.977755] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2294.977908] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2294.978047] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2294.978171] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2294.978359] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2294.978572] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2295.031910] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6692526-038e-482c-bbd9-0eaeca4c4d46 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.038985] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa868b4-e9ca-4705-a0cc-50c834497547 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.068455] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6030a0-af7e-47b0-9ca9-26061c5624e2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.074780] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e57a25e-2fee-42da-90e8-ef791e55ef81 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.087177] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2295.094822] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2295.108734] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2295.108879] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.182s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2298.104506] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.857306] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.857581] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.857713] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2301.856456] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2301.856654] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2301.856776] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2301.869992] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2301.870267] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2301.870301] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2301.870412] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2301.870530] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2301.871016] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2303.856888] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2303.857244] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2335.013041] env[67008]: WARNING oslo_vmware.rw_handles [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2335.013041] env[67008]: ERROR oslo_vmware.rw_handles [ 2335.013563] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2335.015635] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2335.015874] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 
tempest-ListServerFiltersTestJSON-248519843-project-member] Copying Virtual Disk [datastore1] vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/f803736c-74b6-4241-9b7d-f936930f1316/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2335.016163] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e44955d-70c5-4afb-9a83-85451ea1a480 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.024713] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2335.024713] env[67008]: value = "task-2825041" [ 2335.024713] env[67008]: _type = "Task" [ 2335.024713] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.032640] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.534714] env[67008]: DEBUG oslo_vmware.exceptions [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Fault InvalidArgument not matched. 
{{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2335.534994] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2335.535569] env[67008]: ERROR nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.535569] env[67008]: Faults: ['InvalidArgument'] [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Traceback (most recent call last): [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] yield resources [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self.driver.spawn(context, instance, image_meta, [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self._fetch_image_if_missing(context, vi) [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] image_cache(vi, tmp_image_ds_loc) [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] vm_util.copy_virtual_disk( [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] session._wait_for_task(vmdk_copy_task) [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] return self.wait_for_task(task_ref) [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] return evt.wait() [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] result = hub.switch() [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] return self.greenlet.switch() [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self.f(*self.args, **self.kw) [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] raise exceptions.translate_fault(task_info.error) [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Faults: ['InvalidArgument'] [ 2335.535569] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] [ 2335.536646] env[67008]: INFO nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Terminating instance [ 2335.537414] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2335.537618] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2335.537848] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-99fc2190-a629-4e82-bfdf-7a4403d282ab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.539968] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2335.540168] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2335.540899] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a8afef-9728-4015-91df-c497401dfcf4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.547467] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2335.547714] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0baf8965-7e7d-47f1-a863-a698f0f67a1d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.549769] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2335.549940] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2335.550877] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434b2e94-3f86-4eb6-bf9e-c71cc3e4b7b3 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.555196] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2335.555196] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520b51a2-52f2-acb2-b2f2-59f2c79a6ed1" [ 2335.555196] env[67008]: _type = "Task" [ 2335.555196] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.562944] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]520b51a2-52f2-acb2-b2f2-59f2c79a6ed1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.624765] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2335.625012] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2335.625201] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleting the datastore file [datastore1] 54e33842-40a5-48e5-8813-f2da4f9fc152 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2335.625459] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d78d49a-39e5-486a-b7c4-863ce867787b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.631047] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2335.631047] env[67008]: value = "task-2825043" [ 2335.631047] env[67008]: _type = "Task" [ 2335.631047] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.638196] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825043, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2336.065119] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2336.065442] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating directory with path [datastore1] vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2336.065681] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9136aca1-ac83-4311-a913-5a3e09472335 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.076977] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Created directory with path [datastore1] vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2336.077217] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Fetch image to [datastore1] vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2336.077430] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2336.078221] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537500bf-f33f-41c0-872c-995557840df1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.084454] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1d0d7a-ab7f-4ef0-a1f8-21433bc6f86b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.093437] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282ee100-1be4-4b9d-819c-0e6f35cb59bf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.123823] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2634a3fb-0b02-4c18-a71f-4f85d7a46e3b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.129434] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9f13c43b-b6a0-45d3-83fd-9d84a26bb498 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.138524] env[67008]: DEBUG oslo_vmware.api [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063206} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2336.138758] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2336.138992] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2336.139282] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2336.139538] env[67008]: INFO nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Took 0.60 seconds to destroy the instance on the hypervisor. 
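[Editor's note: both failed builds in this section follow the same image-cache path: the Glance image is streamed to a temporary tmp-sparse.vmdk on datastore1, then a CopyVirtualDisk_Task moves it into devstack-image-cache_base, and that copy is the call that faults with 'InvalidArgument: fileType'. A condensed sketch of that step order; only the ordering follows the vmops code references in the log, while 'vi' and the injected callables are hypothetical stand-ins for the real datastore, Glance, and vCenter helpers.]

    def fetch_image_if_missing(vi, file_exists, download, copy_disk,
                               wait_for_task):
        # Condensed view of the flow logged by vmops._fetch_image_if_missing.
        # vi: dict with 'image_id', 'tmp_dir', 'cache_vmdk_path' (assumed).
        if file_exists(vi['cache_vmdk_path']):
            return  # image already cached under devstack-image-cache_base

        # "Fetch image to [datastore1] vmware_temp/.../tmp-sparse.vmdk":
        # stream the image bytes to a temporary sparse disk on the datastore.
        tmp_path = vi['tmp_dir'] + '/tmp-sparse.vmdk'
        download(vi['image_id'], tmp_path)

        # "Copying Virtual Disk ...": CopyVirtualDisk_Task converts the sparse
        # temp file into the cached disk. In this log it is this task that
        # errors with InvalidArgument: fileType, so the claim is aborted and
        # the build re-scheduled.
        task = copy_disk(tmp_path, vi['cache_vmdk_path'])
        wait_for_task(task)  # raises a translated fault on task error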
[ 2336.141871] env[67008]: DEBUG nova.compute.claims [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2336.142085] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2336.142374] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2336.152088] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2336.201960] env[67008]: DEBUG oslo_vmware.rw_handles [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2336.260249] env[67008]: DEBUG oslo_vmware.rw_handles [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2336.260463] env[67008]: DEBUG oslo_vmware.rw_handles [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2336.291425] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0183852-8363-4597-a90a-84e7762050fd {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.298402] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4a6251-faa7-4d49-a3d9-eea3e29c1323 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.329185] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e371db-95f7-4a98-a3f5-2078cce5e112 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.336223] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab7eb38-9e5b-49cf-89b1-7f22f0f24537 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.348910] env[67008]: DEBUG nova.compute.provider_tree [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2336.357446] env[67008]: DEBUG nova.scheduler.client.report [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2336.369336] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.227s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2336.369864] env[67008]: ERROR nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2336.369864] env[67008]: Faults: ['InvalidArgument'] [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Traceback (most recent call last): [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2336.369864] 
env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self.driver.spawn(context, instance, image_meta, [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self._fetch_image_if_missing(context, vi) [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] image_cache(vi, tmp_image_ds_loc) [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] vm_util.copy_virtual_disk( [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] session._wait_for_task(vmdk_copy_task) [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] return self.wait_for_task(task_ref) [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] return evt.wait() [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] result = hub.switch() [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] return self.greenlet.switch() [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] self.f(*self.args, **self.kw) [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] raise exceptions.translate_fault(task_info.error) [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Faults: ['InvalidArgument'] [ 2336.369864] env[67008]: ERROR nova.compute.manager [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] [ 2336.370663] env[67008]: DEBUG nova.compute.utils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2336.371938] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Build of instance 54e33842-40a5-48e5-8813-f2da4f9fc152 was re-scheduled: A specified parameter was not correct: fileType [ 2336.371938] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2336.372318] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2336.372494] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2336.372684] env[67008]: DEBUG nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2336.372862] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2336.669456] env[67008]: DEBUG nova.network.neutron [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.680192] env[67008]: INFO nova.compute.manager [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Took 0.31 seconds to deallocate network for instance. [ 2336.773111] env[67008]: INFO nova.scheduler.client.report [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleted allocations for instance 54e33842-40a5-48e5-8813-f2da4f9fc152 [ 2336.791625] env[67008]: DEBUG oslo_concurrency.lockutils [None req-6fa98375-8edc-4fa2-ae9e-bedb2baf31b4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 465.448s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2336.791891] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 47.965s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2336.792149] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "54e33842-40a5-48e5-8813-f2da4f9fc152-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2336.792363] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2336.792549] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2336.794443] env[67008]: INFO nova.compute.manager [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Terminating instance [ 2336.796165] env[67008]: DEBUG nova.compute.manager [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2336.796361] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2336.796826] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc41589b-d6cc-4457-904b-8cd1f7cf583b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.806674] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5662f99-254c-4516-8248-04c4589ed342 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.831634] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54e33842-40a5-48e5-8813-f2da4f9fc152 could not be found. [ 2336.831846] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2336.832036] env[67008]: INFO nova.compute.manager [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2336.832279] env[67008]: DEBUG oslo.service.loopingcall [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2336.832486] env[67008]: DEBUG nova.compute.manager [-] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2336.832606] env[67008]: DEBUG nova.network.neutron [-] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2336.854732] env[67008]: DEBUG nova.network.neutron [-] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.862846] env[67008]: INFO nova.compute.manager [-] [instance: 54e33842-40a5-48e5-8813-f2da4f9fc152] Took 0.03 seconds to deallocate network for instance. [ 2336.945989] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ffdf733a-e2ce-4e63-bc7d-5ad07ab144a4 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "54e33842-40a5-48e5-8813-f2da4f9fc152" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.154s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2341.857387] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.872783] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.856889] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager.update_available_resource {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2355.880048] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2355.880048] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2355.880048] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2355.880048] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=67008) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2355.881279] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd5e398-ad2a-433b-9e19-f42c3972ee53 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.889968] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43960f36-9cb4-47d2-9d26-56d9c0ed3469 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.903371] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a83706a-a23f-4fc7-bc7f-681bd2257aa4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.909249] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356f547a-319a-4c34-ba64-ccd4a4cf0b24 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.939901] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181071MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=67008) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2355.940078] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2355.940237] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2356.163996] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.164194] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.164319] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Instance 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=67008) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.164497] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2356.164637] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=67008) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2356.213273] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a869e6a4-e24c-4496-8698-df6630d40935 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.220904] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0b3dcd-525f-4c00-a254-9d3c7d335f95 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.249485] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59fef99-3327-477d-88e1-1941e55225ae {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.256406] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc2132c-8719-45b0-887c-06a2197fc027 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.268867] env[67008]: DEBUG nova.compute.provider_tree [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2356.276897] env[67008]: DEBUG nova.scheduler.client.report [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2356.297904] env[67008]: DEBUG nova.compute.resource_tracker [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=67008) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2356.298107] 
env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.358s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2358.293854] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.002590] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.003139] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Getting list of instances from cluster (obj){ [ 2360.003139] env[67008]: value = "domain-c8" [ 2360.003139] env[67008]: _type = "ClusterComputeResource" [ 2360.003139] env[67008]: } {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2360.004246] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0101618-db62-4e4a-a6af-84c9ee58d5c1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.016642] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Got total of 3 instances {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2360.307603] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "4c046377-d7d1-424a-866e-803354a29a26" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2360.307829] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "4c046377-d7d1-424a-866e-803354a29a26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2360.330595] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2360.381878] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2360.382133] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2360.383547] env[67008]: INFO nova.compute.claims [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2360.486374] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf627c6-ea30-4473-a554-0b40bdc7da08 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.493689] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1afb23b-9cc7-4a94-ac08-4254155cd3e8 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.522675] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcc33aa-88fd-4ed8-941f-31de296e6096 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.529181] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4618bbb-8204-4c44-a67a-21bcf28df09d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.541602] env[67008]: DEBUG nova.compute.provider_tree [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2360.549800] env[67008]: DEBUG nova.scheduler.client.report [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2360.566227] env[67008]: DEBUG 
oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.184s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2360.566546] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2360.599095] env[67008]: DEBUG nova.compute.utils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2360.600208] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2360.600390] env[67008]: DEBUG nova.network.neutron [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2360.609579] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Start building block device mappings for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2360.642881] env[67008]: INFO nova.virt.block_device [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Booting with volume f9ace753-6dc1-4c53-b3b0-1613d1027785 at /dev/sda [ 2360.701970] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69fd440c-aa3c-45dc-9f55-6db7950a5761 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.710691] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fc0bb1-3412-4dd6-8592-f23bf4a6ea2b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.734679] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0adbdbb1-0a0f-41fc-923c-16e44f8c3769 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.737391] env[67008]: DEBUG nova.policy [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77d6bcd8fd4d49549d7dd33046a2c65c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38b21c167e38410a9e22df2f7436b252', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 2360.744415] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856b4d12-4078-4779-a769-f65aeec14a20 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.768562] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26ebe55-6a93-4a88-8f43-20db566f1068 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.774675] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecd62b0-e4b5-45fb-9aad-ad66cf4ddb76 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.787530] env[67008]: DEBUG nova.virt.block_device [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updating existing volume attachment record: a9faba82-d3bd-4918-addc-870d5527f58c {{(pid=67008) _volume_attach /opt/stack/nova/nova/virt/block_device.py:631}} [ 2360.905528] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.905868] env[67008]: DEBUG oslo_service.periodic_task [None 
req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.906099] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=67008) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2361.070995] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2361.071544] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2361.071788] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2361.071950] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2361.072149] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2361.072298] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2361.072444] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2361.072645] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 
tempest-ServerActionsV293TestJSON-145116815-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2361.072800] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2361.072973] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2361.073276] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2361.073467] env[67008]: DEBUG nova.virt.hardware [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2361.074804] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2acef28-bed4-4839-bcbe-963352ea2f43 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.083146] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c257a639-9e8d-4824-ad02-af7024e04c50 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.166776] env[67008]: DEBUG nova.network.neutron [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Successfully created port: 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2361.674129] env[67008]: DEBUG nova.compute.manager [req-0dd5b743-044d-48c7-af94-aef323031bad req-9883a027-368e-4869-a44b-2772fe93dd32 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Received event network-vif-plugged-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2361.674129] env[67008]: DEBUG oslo_concurrency.lockutils [req-0dd5b743-044d-48c7-af94-aef323031bad req-9883a027-368e-4869-a44b-2772fe93dd32 service nova] Acquiring lock "4c046377-d7d1-424a-866e-803354a29a26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2361.674129] env[67008]: DEBUG oslo_concurrency.lockutils [req-0dd5b743-044d-48c7-af94-aef323031bad req-9883a027-368e-4869-a44b-2772fe93dd32 service nova] Lock "4c046377-d7d1-424a-866e-803354a29a26-events" 
acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2361.674129] env[67008]: DEBUG oslo_concurrency.lockutils [req-0dd5b743-044d-48c7-af94-aef323031bad req-9883a027-368e-4869-a44b-2772fe93dd32 service nova] Lock "4c046377-d7d1-424a-866e-803354a29a26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2361.674129] env[67008]: DEBUG nova.compute.manager [req-0dd5b743-044d-48c7-af94-aef323031bad req-9883a027-368e-4869-a44b-2772fe93dd32 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] No waiting events found dispatching network-vif-plugged-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2361.674547] env[67008]: WARNING nova.compute.manager [req-0dd5b743-044d-48c7-af94-aef323031bad req-9883a027-368e-4869-a44b-2772fe93dd32 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Received unexpected event network-vif-plugged-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e for instance with vm_state building and task_state spawning. [ 2361.749039] env[67008]: DEBUG nova.network.neutron [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Successfully updated port: 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2361.775801] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2361.775952] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquired lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2361.776120] env[67008]: DEBUG nova.network.neutron [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2361.812963] env[67008]: DEBUG nova.network.neutron [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2361.858889] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.859083] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Starting heal instance info cache {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2361.859208] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Rebuilding the list of instances to heal {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2361.877900] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2361.878036] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2361.878160] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2361.878285] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Skipping network cache update for instance because it is Building. {{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2361.878579] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Didn't find any instances for network info cache update. 
{{(pid=67008) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2361.979665] env[67008]: DEBUG nova.network.neutron [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updating instance_info_cache with network_info: [{"id": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "address": "fa:16:3e:19:ee:11", "network": {"id": "93abbf34-c608-43ef-8b96-fbfa7600ec91", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1493811503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38b21c167e38410a9e22df2f7436b252", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5edd9e-52", "ovs_interfaceid": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2362.032881] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Releasing lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2362.033293] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance network_info: |[{"id": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "address": "fa:16:3e:19:ee:11", "network": {"id": "93abbf34-c608-43ef-8b96-fbfa7600ec91", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1493811503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38b21c167e38410a9e22df2f7436b252", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5edd9e-52", "ovs_interfaceid": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2362.033706] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:ee:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '777870ab-362f-4a17-9c1c-8d9cc26cd4ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c5edd9e-52b1-40b7-bdd4-9ec61dda167e', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2362.041445] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Creating folder: Project (38b21c167e38410a9e22df2f7436b252). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2362.042013] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-549c2a48-071b-4718-96ec-366b9d01db2d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.054717] env[67008]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2362.054886] env[67008]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=67008) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2362.055205] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Folder already exists: Project (38b21c167e38410a9e22df2f7436b252). Parent ref: group-v567993. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2362.055392] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Creating folder: Instances. Parent ref: group-v568098. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2362.055613] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b19d969-e991-4ff4-afc3-d26b08b790c5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.064115] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Created folder: Instances in parent group-v568098. [ 2362.064341] env[67008]: DEBUG oslo.service.loopingcall [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2362.064528] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2362.064729] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b2e6e39-683b-4035-b8e4-d3074a96b3df {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.083167] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2362.083167] env[67008]: value = "task-2825056" [ 2362.083167] env[67008]: _type = "Task" [ 2362.083167] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2362.090497] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825056, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.593400] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825056, 'name': CreateVM_Task} progress is 25%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2363.093139] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825056, 'name': CreateVM_Task, 'duration_secs': 0.847058} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2363.093473] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2363.093944] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'delete_on_termination': True, 'disk_bus': None, 'attachment_id': 'a9faba82-d3bd-4918-addc-870d5527f58c', 'guest_format': None, 'device_type': None, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-568101', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'name': 'volume-f9ace753-6dc1-4c53-b3b0-1613d1027785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4c046377-d7d1-424a-866e-803354a29a26', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'serial': 'f9ace753-6dc1-4c53-b3b0-1613d1027785'}, 'volume_type': None}], 'swap': None} {{(pid=67008) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2363.094190] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Root volume attach. 
Driver type: vmdk {{(pid=67008) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2363.094923] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ac6e3a-36ff-4406-902d-8c923f21d343 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.103066] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf12449-8ed9-4a9c-87fa-5043e6e31fa6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.110259] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ff8ff2-3770-4889-ac11-b03de5dfdf1d {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.116443] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-3a565239-4040-4250-9ee7-f3581efc1d3f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.122345] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2363.122345] env[67008]: value = "task-2825057" [ 2363.122345] env[67008]: _type = "Task" [ 2363.122345] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2363.129523] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825057, 'name': RelocateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2363.632328] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825057, 'name': RelocateVM_Task} progress is 19%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2363.702962] env[67008]: DEBUG nova.compute.manager [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Received event network-changed-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2363.703219] env[67008]: DEBUG nova.compute.manager [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Refreshing instance network info cache due to event network-changed-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e. 
{{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2363.703388] env[67008]: DEBUG oslo_concurrency.lockutils [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] Acquiring lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2363.703551] env[67008]: DEBUG oslo_concurrency.lockutils [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] Acquired lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2363.703678] env[67008]: DEBUG nova.network.neutron [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Refreshing network info cache for port 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2363.856267] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.856501] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2364.111214] env[67008]: DEBUG nova.network.neutron [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updated VIF entry in instance network info cache for port 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2364.111580] env[67008]: DEBUG nova.network.neutron [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updating instance_info_cache with network_info: [{"id": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "address": "fa:16:3e:19:ee:11", "network": {"id": "93abbf34-c608-43ef-8b96-fbfa7600ec91", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1493811503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38b21c167e38410a9e22df2f7436b252", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5edd9e-52", "ovs_interfaceid": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2364.120471] env[67008]: DEBUG oslo_concurrency.lockutils [req-62f3d999-42a7-4f50-8cfb-f849af901b8b req-f1b0d3b6-6a82-4d05-a2e9-17b8d55d23c7 service nova] Releasing lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2364.133379] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825057, 'name': RelocateVM_Task} progress is 20%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2364.634574] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825057, 'name': RelocateVM_Task, 'duration_secs': 1.125249} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2364.634850] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Volume attach. 
Driver type: vmdk {{(pid=67008) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2364.635067] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-568101', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'name': 'volume-f9ace753-6dc1-4c53-b3b0-1613d1027785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4c046377-d7d1-424a-866e-803354a29a26', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'serial': 'f9ace753-6dc1-4c53-b3b0-1613d1027785'} {{(pid=67008) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2364.635797] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b57296-7c90-4c49-b14f-7f3e26610396 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.651836] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cde7e2b-533e-4380-b151-6d18c265d6b9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.673042] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] volume-f9ace753-6dc1-4c53-b3b0-1613d1027785/volume-f9ace753-6dc1-4c53-b3b0-1613d1027785.vmdk or device None with type thin {{(pid=67008) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2364.673271] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cae26306-29ad-41d0-932c-866482133c49 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.691650] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2364.691650] env[67008]: value = "task-2825058" [ 2364.691650] env[67008]: _type = "Task" [ 2364.691650] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2364.698806] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825058, 'name': ReconfigVM_Task} progress is 5%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.203303] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825058, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.701816] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825058, 'name': ReconfigVM_Task, 'duration_secs': 0.931112} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2365.702104] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Reconfigured VM instance instance-00000060 to attach disk [datastore2] volume-f9ace753-6dc1-4c53-b3b0-1613d1027785/volume-f9ace753-6dc1-4c53-b3b0-1613d1027785.vmdk or device None with type thin {{(pid=67008) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2365.706612] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2824751-b444-4d0a-9254-ed98a8ce3a7b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.721456] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2365.721456] env[67008]: value = "task-2825059" [ 2365.721456] env[67008]: _type = "Task" [ 2365.721456] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.728943] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.857082] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2366.231282] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825059, 'name': ReconfigVM_Task, 'duration_secs': 0.237878} completed successfully. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.231709] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-568101', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'name': 'volume-f9ace753-6dc1-4c53-b3b0-1613d1027785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4c046377-d7d1-424a-866e-803354a29a26', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'serial': 'f9ace753-6dc1-4c53-b3b0-1613d1027785'} {{(pid=67008) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2366.232145] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ed8dd14-9005-478c-bbcf-46f90d8fce01 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.238403] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2366.238403] env[67008]: value = "task-2825060" [ 2366.238403] env[67008]: _type = "Task" [ 2366.238403] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2366.249176] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825060, 'name': Rename_Task} progress is 5%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2366.748533] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825060, 'name': Rename_Task, 'duration_secs': 0.221131} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.748815] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Powering on the VM {{(pid=67008) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 2366.749066] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df66127e-6855-40b3-a65c-7d7720eddba7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.754873] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2366.754873] env[67008]: value = "task-2825061" [ 2366.754873] env[67008]: _type = "Task" [ 2366.754873] env[67008]: } to complete. 
{{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2366.761911] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825061, 'name': PowerOnVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.264734] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825061, 'name': PowerOnVM_Task} progress is 66%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.765352] env[67008]: DEBUG oslo_vmware.api [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825061, 'name': PowerOnVM_Task, 'duration_secs': 0.931704} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2367.765599] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Powered on the VM {{(pid=67008) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 2367.765800] env[67008]: INFO nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Took 6.69 seconds to spawn the instance on the hypervisor. [ 2367.766073] env[67008]: DEBUG nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Checking state {{(pid=67008) _get_power_state /opt/stack/nova/nova/compute/manager.py:1766}} [ 2367.766815] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc75f23-21bb-4f32-83c2-7ce7cc437a2e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.827020] env[67008]: INFO nova.compute.manager [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Took 7.46 seconds to build instance. 
[ 2367.865520] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7c2b2cf5-7030-4adf-92be-9a602e37c6fc tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "4c046377-d7d1-424a-866e-803354a29a26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.558s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2368.819188] env[67008]: DEBUG nova.compute.manager [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Received event network-changed-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2368.819461] env[67008]: DEBUG nova.compute.manager [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Refreshing instance network info cache due to event network-changed-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2368.819584] env[67008]: DEBUG oslo_concurrency.lockutils [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] Acquiring lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2368.819725] env[67008]: DEBUG oslo_concurrency.lockutils [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] Acquired lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2368.819881] env[67008]: DEBUG nova.network.neutron [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Refreshing network info cache for port 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2369.092657] env[67008]: DEBUG nova.network.neutron [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updated VIF entry in instance network info cache for port 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e. 
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2369.093044] env[67008]: DEBUG nova.network.neutron [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updating instance_info_cache with network_info: [{"id": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "address": "fa:16:3e:19:ee:11", "network": {"id": "93abbf34-c608-43ef-8b96-fbfa7600ec91", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1493811503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38b21c167e38410a9e22df2f7436b252", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "777870ab-362f-4a17-9c1c-8d9cc26cd4ce", "external-id": "nsx-vlan-transportzone-987", "segmentation_id": 987, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c5edd9e-52", "ovs_interfaceid": "8c5edd9e-52b1-40b7-bdd4-9ec61dda167e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2369.108221] env[67008]: DEBUG oslo_concurrency.lockutils [req-f07c941e-0893-48c2-a383-241836fa2392 req-38466403-a045-4af4-8e5d-bd67400240cc service nova] Releasing lock "refresh_cache-4c046377-d7d1-424a-866e-803354a29a26" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2371.852564] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2372.857558] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2372.857912] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances with incomplete migration {{(pid=67008) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2374.867364] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.867839] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Cleaning up deleted instances {{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2374.878707] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] There are 0 instances to clean 
{{(pid=67008) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2381.680867] env[67008]: WARNING oslo_vmware.rw_handles [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles response.begin() [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2381.680867] env[67008]: ERROR oslo_vmware.rw_handles [ 2381.681384] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Downloaded image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2381.683648] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Caching image {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2381.683963] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Copying Virtual Disk [datastore1] vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk to [datastore1] vmware_temp/40295a7e-f939-40cf-9dbc-c52e7de2a397/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk {{(pid=67008) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2381.684224] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2d8287e-f27d-406b-9343-cbb005eb1ec7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.692229] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] 
Waiting for the task: (returnval){ [ 2381.692229] env[67008]: value = "task-2825062" [ 2381.692229] env[67008]: _type = "Task" [ 2381.692229] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.700226] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825062, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.203010] env[67008]: DEBUG oslo_vmware.exceptions [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Fault InvalidArgument not matched. {{(pid=67008) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2382.203342] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2382.203891] env[67008]: ERROR nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2382.203891] env[67008]: Faults: ['InvalidArgument'] [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Traceback (most recent call last): [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] yield resources [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self.driver.spawn(context, instance, image_meta, [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self._fetch_image_if_missing(context, vi) [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2382.203891] env[67008]: ERROR 
nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] image_cache(vi, tmp_image_ds_loc) [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] vm_util.copy_virtual_disk( [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] session._wait_for_task(vmdk_copy_task) [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] return self.wait_for_task(task_ref) [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] return evt.wait() [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] result = hub.switch() [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] return self.greenlet.switch() [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self.f(*self.args, **self.kw) [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] raise exceptions.translate_fault(task_info.error) [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Faults: ['InvalidArgument'] [ 2382.203891] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] [ 2382.204824] env[67008]: INFO nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Terminating instance [ 2382.205643] env[67008]: DEBUG oslo_concurrency.lockutils [None 
req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2382.205850] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2382.206100] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f4a47e7-433b-4a47-8054-69cfd9180131 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.208215] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2382.208415] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2382.209209] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260b6eaf-8c59-4fff-b423-7e83db8c4502 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.215849] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2382.216067] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60432716-25b0-4d61-81e0-3b4518fb821a {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.218127] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2382.218281] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=67008) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2382.219206] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b39d552-0b9b-4cc3-9cd7-3f728a9930d0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.223802] env[67008]: DEBUG oslo_vmware.api [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Waiting for the task: (returnval){ [ 2382.223802] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f26e08-f40a-cfa8-6819-442ea8d47466" [ 2382.223802] env[67008]: _type = "Task" [ 2382.223802] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.233894] env[67008]: DEBUG oslo_vmware.api [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52f26e08-f40a-cfa8-6819-442ea8d47466, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.290132] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2382.290366] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Deleting contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2382.290742] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleting the datastore file [datastore1] 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2382.291089] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1aa1e49-6db8-46a4-9736-46af4b3286bf {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.297684] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for the task: (returnval){ [ 2382.297684] env[67008]: value = "task-2825064" [ 2382.297684] env[67008]: _type = "Task" [ 2382.297684] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.306285] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825064, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.734303] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Preparing fetch location {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2382.734580] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating directory with path [datastore1] vmware_temp/aa9d7e4b-2b9f-4ac1-b57f-0f5f83579f50/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2382.734793] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82bed377-0f6a-4504-bdbb-308d59fb20a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.746091] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Created directory with path [datastore1] vmware_temp/aa9d7e4b-2b9f-4ac1-b57f-0f5f83579f50/ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2382.746283] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Fetch image to [datastore1] vmware_temp/aa9d7e4b-2b9f-4ac1-b57f-0f5f83579f50/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2382.746465] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to [datastore1] vmware_temp/aa9d7e4b-2b9f-4ac1-b57f-0f5f83579f50/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk on the data store datastore1 {{(pid=67008) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2382.747185] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1323e3-2773-4a2d-b1fc-2ea6278f1b76 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.753871] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea511bf7-9f9e-4130-9e0b-fd2b18904c76 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.762961] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d6d393-c83f-4561-9589-3b419d694a2b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.463175] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c880acfc-14f6-47d9-9ae7-88a52b5654dc {{(pid=67008) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.472701] env[67008]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f244fd4f-833a-42fb-ba1a-2300784e4d29 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.474378] env[67008]: DEBUG oslo_vmware.api [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Task: {'id': task-2825064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077128} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2383.474613] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2383.474787] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Deleted contents of the VM from datastore datastore1 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2383.474951] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2383.475152] env[67008]: INFO nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Took 1.27 seconds to destroy the instance on the hypervisor. 
[ 2383.477109] env[67008]: DEBUG nova.compute.claims [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Aborting claim: {{(pid=67008) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2383.477317] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2383.477527] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2383.495022] env[67008]: DEBUG nova.virt.vmwareapi.images [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] [instance: bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9] Downloading image file data ae01aa56-93e6-47e6-accd-8c8a802d92bd to the data store datastore1 {{(pid=67008) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2383.638699] env[67008]: DEBUG oslo_vmware.rw_handles [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aa9d7e4b-2b9f-4ac1-b57f-0f5f83579f50/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=67008) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2383.712163] env[67008]: DEBUG oslo_vmware.rw_handles [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Completed reading data from the image iterator. {{(pid=67008) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2383.712391] env[67008]: DEBUG oslo_vmware.rw_handles [None req-bda0d762-c8fc-49d4-895c-1f2c2450c0d4 tempest-ServersTestJSON-301886130 tempest-ServersTestJSON-301886130-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aa9d7e4b-2b9f-4ac1-b57f-0f5f83579f50/ae01aa56-93e6-47e6-accd-8c8a802d92bd/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=67008) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2383.757829] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fa0acf-eafa-4312-a910-e2affc33abd4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.765847] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a908ce8-d188-4e7c-b5a5-127cfd66461b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.796894] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c417a6a8-c9a6-4e21-bfcc-10888d4abc42 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.804763] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2b19e3-4b8b-4bd6-bc7b-fe196f003558 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.818324] env[67008]: DEBUG nova.compute.provider_tree [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2383.827123] env[67008]: DEBUG nova.scheduler.client.report [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2383.840778] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2383.841358] env[67008]: ERROR nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2383.841358] env[67008]: Faults: ['InvalidArgument'] [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Traceback (most recent call last): [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2383.841358] 
env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self.driver.spawn(context, instance, image_meta, [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self._fetch_image_if_missing(context, vi) [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] image_cache(vi, tmp_image_ds_loc) [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] vm_util.copy_virtual_disk( [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] session._wait_for_task(vmdk_copy_task) [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] return self.wait_for_task(task_ref) [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] return evt.wait() [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] result = hub.switch() [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] return self.greenlet.switch() [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] self.f(*self.args, **self.kw) [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] raise exceptions.translate_fault(task_info.error) [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Faults: ['InvalidArgument'] [ 2383.841358] env[67008]: ERROR nova.compute.manager [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] [ 2383.842217] env[67008]: DEBUG nova.compute.utils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] VimFaultException {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2383.843446] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Build of instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e was re-scheduled: A specified parameter was not correct: fileType [ 2383.843446] env[67008]: Faults: ['InvalidArgument'] {{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2383.843813] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Unplugging VIFs for instance {{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2383.843983] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=67008) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2383.844167] env[67008]: DEBUG nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2383.844330] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2384.151885] env[67008]: DEBUG nova.network.neutron [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2384.162990] env[67008]: INFO nova.compute.manager [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Took 0.32 seconds to deallocate network for instance. [ 2384.255555] env[67008]: INFO nova.scheduler.client.report [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Deleted allocations for instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e [ 2384.280361] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18088d01-63f9-4e25-9445-e54360370b3e tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 512.077s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2384.280625] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 316.683s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2384.281457] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Acquiring lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2384.281457] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2384.281457] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2384.283255] env[67008]: INFO nova.compute.manager [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Terminating instance [ 2384.285158] env[67008]: DEBUG nova.compute.manager [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2384.285408] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2384.285941] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52a607bc-137c-4ded-bf83-62a1492ce7c4 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.294880] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3249efdd-5ebf-46d1-a8c0-ad56766e5de6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.319726] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e could not be found. [ 2384.319923] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2384.320108] env[67008]: INFO nova.compute.manager [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 2384.320347] env[67008]: DEBUG oslo.service.loopingcall [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2384.320557] env[67008]: DEBUG nova.compute.manager [-] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2384.320655] env[67008]: DEBUG nova.network.neutron [-] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2384.341251] env[67008]: DEBUG nova.network.neutron [-] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2384.349286] env[67008]: INFO nova.compute.manager [-] [instance: 3ff618d8-a6db-4ff7-b11f-0c4e161cc98e] Took 0.03 seconds to deallocate network for instance. [ 2384.432769] env[67008]: DEBUG oslo_concurrency.lockutils [None req-9d50006d-937f-4112-9d24-58ef23873e00 tempest-ListServerFiltersTestJSON-248519843 tempest-ListServerFiltersTestJSON-248519843-project-member] Lock "3ff618d8-a6db-4ff7-b11f-0c4e161cc98e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.152s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2386.125429] env[67008]: INFO nova.compute.manager [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Rebuilding instance [ 2386.172165] env[67008]: DEBUG nova.compute.manager [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Checking state {{(pid=67008) _get_power_state /opt/stack/nova/nova/compute/manager.py:1766}} [ 2386.173011] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dbecd7-d6ea-4646-99b2-663455b341d6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.210839] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Powering off the VM {{(pid=67008) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 2386.211503] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7b5f038-a4a3-4e28-a315-fac26edb89a1 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.219220] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2386.219220] env[67008]: value 
= "task-2825065" [ 2386.219220] env[67008]: _type = "Task" [ 2386.219220] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.228215] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.728834] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825065, 'name': PowerOffVM_Task, 'duration_secs': 0.142957} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.729098] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Powered off the VM {{(pid=67008) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 2386.729777] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Powering off the VM {{(pid=67008) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 2386.730024] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6002a247-fbe6-4d6d-a81e-5ac45c54672f {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.736066] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2386.736066] env[67008]: value = "task-2825066" [ 2386.736066] env[67008]: _type = "Task" [ 2386.736066] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2386.743060] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825066, 'name': PowerOffVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.247381] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] VM already powered off {{(pid=67008) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 2387.247707] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Volume detach. 
Driver type: vmdk {{(pid=67008) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2387.247743] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-568101', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'name': 'volume-f9ace753-6dc1-4c53-b3b0-1613d1027785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4c046377-d7d1-424a-866e-803354a29a26', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'serial': 'f9ace753-6dc1-4c53-b3b0-1613d1027785'} {{(pid=67008) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2387.248532] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3918ba81-b59a-4b39-b145-73748af4a3c5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.266052] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3303de90-7d0d-4b8a-8240-d94916dde091 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.272420] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203b66d6-e418-49e2-9ea5-22ded8cfafa9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.289673] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d35f18-153f-446d-ac9f-22aba96581d7 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.304564] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] The volume has not been displaced from its original location: [datastore2] volume-f9ace753-6dc1-4c53-b3b0-1613d1027785/volume-f9ace753-6dc1-4c53-b3b0-1613d1027785.vmdk. No consolidation needed. 
{{(pid=67008) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2387.309893] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=67008) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2387.310189] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32845849-b68c-48b0-b388-4fefc979a76b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.328461] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2387.328461] env[67008]: value = "task-2825067" [ 2387.328461] env[67008]: _type = "Task" [ 2387.328461] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2387.336193] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825067, 'name': ReconfigVM_Task} progress is 5%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.838728] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825067, 'name': ReconfigVM_Task, 'duration_secs': 0.175283} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2387.840030] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=67008) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2387.843870] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2fe3ad9-f35a-4557-a3bf-b4a39453bae6 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.858810] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2387.858810] env[67008]: value = "task-2825068" [ 2387.858810] env[67008]: _type = "Task" [ 2387.858810] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2387.866090] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825068, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.370181] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825068, 'name': ReconfigVM_Task, 'duration_secs': 0.105954} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2388.370527] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-568101', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'name': 'volume-f9ace753-6dc1-4c53-b3b0-1613d1027785', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4c046377-d7d1-424a-866e-803354a29a26', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9ace753-6dc1-4c53-b3b0-1613d1027785', 'serial': 'f9ace753-6dc1-4c53-b3b0-1613d1027785'} {{(pid=67008) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2388.370605] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2388.371337] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b370f477-5393-43bd-9d9f-1903c607d7a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.377601] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Unregistering the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2388.377807] env[67008]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ceb4237d-9145-4ac2-9f6a-1e966bfb7a35 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.444282] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Unregistered the VM {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2388.444511] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Deleting contents of the VM from datastore datastore2 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2388.444632] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 
tempest-ServerActionsV293TestJSON-145116815-project-member] Deleting the datastore file [datastore2] 4c046377-d7d1-424a-866e-803354a29a26 {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2388.444888] env[67008]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b488421-41ef-4d9d-8ba0-c2efeb3d6a23 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.450647] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for the task: (returnval){ [ 2388.450647] env[67008]: value = "task-2825070" [ 2388.450647] env[67008]: _type = "Task" [ 2388.450647] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2388.457886] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825070, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.960439] env[67008]: DEBUG oslo_vmware.api [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Task: {'id': task-2825070, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075834} completed successfully. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2388.960696] env[67008]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Deleted the datastore file {{(pid=67008) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2388.960874] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Deleted contents of the VM from datastore datastore2 {{(pid=67008) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2388.961082] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2389.013812] env[67008]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Volume detach. 
Driver type: vmdk {{(pid=67008) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2389.014141] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d26008f-ff2e-4388-891b-46966e99a717 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.022327] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15851783-f0dc-4990-a59b-bb05d9b278b2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.046586] env[67008]: ERROR nova.compute.manager [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Failed to detach volume f9ace753-6dc1-4c53-b3b0-1613d1027785 from /dev/sda: nova.exception.InstanceNotFound: Instance 4c046377-d7d1-424a-866e-803354a29a26 could not be found. [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] Traceback (most recent call last): [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 4116, in _do_rebuild_instance [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self.driver.rebuild(**kwargs) [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/driver.py", line 384, in rebuild [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] raise NotImplementedError() [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] NotImplementedError [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] During handling of the above exception, another exception occurred: [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] Traceback (most recent call last): [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3539, in _detach_root_volume [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self.driver.detach_volume(context, old_connection_info, [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] return self._volumeops.detach_volume(connection_info, instance) [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self._detach_volume_vmdk(connection_info, instance) [ 2389.046586] env[67008]: ERROR nova.compute.manager 
[instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] stable_ref.fetch_moref(session) [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] nova.exception.InstanceNotFound: Instance 4c046377-d7d1-424a-866e-803354a29a26 could not be found. [ 2389.046586] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.171505] env[67008]: DEBUG nova.compute.utils [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Build of instance 4c046377-d7d1-424a-866e-803354a29a26 aborted: Failed to rebuild volume backed instance. {{(pid=67008) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2389.173906] env[67008]: ERROR nova.compute.manager [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 4c046377-d7d1-424a-866e-803354a29a26 aborted: Failed to rebuild volume backed instance. 
[ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] Traceback (most recent call last): [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 4116, in _do_rebuild_instance [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self.driver.rebuild(**kwargs) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/driver.py", line 384, in rebuild [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] raise NotImplementedError() [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] NotImplementedError [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] During handling of the above exception, another exception occurred: [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] Traceback (most recent call last): [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3574, in _rebuild_volume_backed_instance [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self._detach_root_volume(context, instance, root_bdm) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3553, in _detach_root_volume [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] with excutils.save_and_reraise_exception(): [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self.force_reraise() [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] raise self.value [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3539, in _detach_root_volume [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self.driver.detach_volume(context, old_connection_info, [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] return self._volumeops.detach_volume(connection_info, instance) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self._detach_volume_vmdk(connection_info, instance) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] stable_ref.fetch_moref(session) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] nova.exception.InstanceNotFound: Instance 4c046377-d7d1-424a-866e-803354a29a26 could not be found. [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] During handling of the above exception, another exception occurred: [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] Traceback (most recent call last): [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 10835, in _error_out_instance_on_exception [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] yield [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3842, in rebuild_instance [ 2389.173906] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self._do_rebuild_instance_with_claim( [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3928, in _do_rebuild_instance_with_claim [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self._do_rebuild_instance( [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 4120, in _do_rebuild_instance [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] self._rebuild_default_impl(**kwargs) [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3697, in _rebuild_default_impl [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] 
self._rebuild_volume_backed_instance( [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] File "/opt/stack/nova/nova/compute/manager.py", line 3589, in _rebuild_volume_backed_instance [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] raise exception.BuildAbortException( [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] nova.exception.BuildAbortException: Build of instance 4c046377-d7d1-424a-866e-803354a29a26 aborted: Failed to rebuild volume backed instance. [ 2389.175050] env[67008]: ERROR nova.compute.manager [instance: 4c046377-d7d1-424a-866e-803354a29a26] [ 2389.261095] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2389.261380] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2389.300105] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4f0523-d971-4cb0-8a23-2b90d65ca393 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.307815] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272126f1-6ef2-4f7e-b657-630975b40f9c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.338155] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55acec4d-717a-41a8-9052-2b96fec388ee {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.345478] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d056aef4-b566-447c-97f5-bd4b12be9ae2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.358397] env[67008]: DEBUG nova.compute.provider_tree [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2389.366936] env[67008]: DEBUG nova.scheduler.client.report [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2389.382786] env[67008]: DEBUG oslo_concurrency.lockutils [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.121s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2389.383154] env[67008]: INFO nova.compute.manager [None req-7bc5a5c6-1d5f-4584-8966-86944afd75c6 tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Successfully reverted task state from rebuilding on failure for instance. [ 2389.767029] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "4c046377-d7d1-424a-866e-803354a29a26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2389.767272] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "4c046377-d7d1-424a-866e-803354a29a26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2389.767479] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "4c046377-d7d1-424a-866e-803354a29a26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2389.767660] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "4c046377-d7d1-424a-866e-803354a29a26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2389.767840] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "4c046377-d7d1-424a-866e-803354a29a26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2389.769674] env[67008]: INFO nova.compute.manager [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 
tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Terminating instance [ 2389.771510] env[67008]: DEBUG nova.compute.manager [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Start destroying the instance on the hypervisor. {{(pid=67008) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2389.771926] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3203243-153b-474d-bd14-55fb49f5cdeb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.780936] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ef51a5-ec4b-4123-a6c6-35132d84c9a5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.805500] env[67008]: WARNING nova.virt.vmwareapi.driver [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 4c046377-d7d1-424a-866e-803354a29a26 could not be found. [ 2389.805662] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Destroying instance {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2389.805945] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdb1b5cf-90c8-453f-9943-f5706d429c1e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.813509] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2bc0b1-eff6-4fe4-a43d-e1e3f3032a77 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.836358] env[67008]: WARNING nova.virt.vmwareapi.vmops [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c046377-d7d1-424a-866e-803354a29a26 could not be found. [ 2389.836551] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Instance destroyed {{(pid=67008) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2389.836730] env[67008]: INFO nova.compute.manager [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Took 0.07 seconds to destroy the instance on the hypervisor. 
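[editor's note] The rebuild traceback earlier in this section documents the control flow behind this failure: the vmwareapi driver inherits `ComputeDriver.rebuild`, which raises `NotImplementedError`, so the compute manager falls back to its generic implementation; for a volume-backed instance that path detaches the root volume first, and here the detach raised `InstanceNotFound` because the VM had already been unregistered, which `_rebuild_volume_backed_instance` surfaced as `BuildAbortException`. A minimal sketch of that fallback, with names paraphrased from nova.compute.manager rather than the exact signatures:

```python
class BuildAbortException(Exception):
    pass


def rebuild_instance(driver, instance, detach_root_volume, spawn, **kwargs):
    """Offer the rebuild to the virt driver, else use the generic path."""
    try:
        # Driver-specific fast path (manager.py line 4116 in the trace).
        driver.rebuild(instance=instance, **kwargs)
    except NotImplementedError:
        # Generic fallback (_rebuild_default_impl in the trace). For a
        # volume-backed instance the root volume is detached first.
        try:
            detach_root_volume(instance)
        except Exception as exc:
            # Mirrors _rebuild_volume_backed_instance converting the
            # detach failure into a BuildAbortException, which is what
            # set the instance's vm_state to ERROR above.
            raise BuildAbortException(
                "Build of instance %s aborted: Failed to rebuild volume "
                "backed instance." % instance) from exc
        spawn(instance, **kwargs)
```

In the real manager the detach failure is first caught inside `excutils.save_and_reraise_exception()` (the `force_reraise` frames in the trace), which runs cleanup and then re-raises the original exception; this sketch collapses that step into the plain `except`.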
[ 2389.836970] env[67008]: DEBUG oslo.service.loopingcall [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2389.837197] env[67008]: DEBUG nova.compute.manager [-] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Deallocating network for instance {{(pid=67008) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2389.837297] env[67008]: DEBUG nova.network.neutron [-] [instance: 4c046377-d7d1-424a-866e-803354a29a26] deallocate_for_instance() {{(pid=67008) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2390.426471] env[67008]: DEBUG nova.compute.manager [req-cabcb96d-4da4-4c00-a519-46fe8c24ccf0 req-f1aa6fee-dd6f-4376-8379-3e908390f626 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Received event network-vif-deleted-8c5edd9e-52b1-40b7-bdd4-9ec61dda167e {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2390.426743] env[67008]: INFO nova.compute.manager [req-cabcb96d-4da4-4c00-a519-46fe8c24ccf0 req-f1aa6fee-dd6f-4376-8379-3e908390f626 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Neutron deleted interface 8c5edd9e-52b1-40b7-bdd4-9ec61dda167e; detaching it from the instance and deleting it from the info cache [ 2390.427049] env[67008]: DEBUG nova.network.neutron [req-cabcb96d-4da4-4c00-a519-46fe8c24ccf0 req-f1aa6fee-dd6f-4376-8379-3e908390f626 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2390.435462] env[67008]: DEBUG nova.network.neutron [-] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Updating instance_info_cache with network_info: [] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2390.440607] env[67008]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-acdbe9ad-f80f-4671-afbb-a3add1b3b85c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.447068] env[67008]: INFO nova.compute.manager [-] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Took 0.61 seconds to deallocate network for instance. [ 2390.455195] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1936ac72-4de2-4adc-82e4-a59988bbc89c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.481059] env[67008]: DEBUG nova.compute.manager [req-cabcb96d-4da4-4c00-a519-46fe8c24ccf0 req-f1aa6fee-dd6f-4376-8379-3e908390f626 service nova] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Detach interface failed, port_id=8c5edd9e-52b1-40b7-bdd4-9ec61dda167e, reason: Instance 4c046377-d7d1-424a-866e-803354a29a26 could not be found. 
{{(pid=67008) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10935}} [ 2390.520517] env[67008]: INFO nova.compute.manager [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Took 0.07 seconds to detach 1 volumes for instance. [ 2390.523186] env[67008]: DEBUG nova.compute.manager [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] [instance: 4c046377-d7d1-424a-866e-803354a29a26] Deleting volume: f9ace753-6dc1-4c53-b3b0-1613d1027785 {{(pid=67008) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3221}} [ 2390.594650] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2390.594650] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2390.594650] env[67008]: DEBUG nova.objects.instance [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lazy-loading 'resources' on Instance uuid 4c046377-d7d1-424a-866e-803354a29a26 {{(pid=67008) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1152}} [ 2390.652489] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f958750e-0519-43e6-8cb9-22054a9ab1fe {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.660404] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e20061a-cdb6-491a-9d29-562c2b314736 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.690892] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30b18a3-894c-4ae1-8f61-859c487affab {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.698740] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4cbf95-e083-4022-b96f-dd9da940b687 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.714778] env[67008]: DEBUG nova.compute.provider_tree [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2390.726077] env[67008]: DEBUG nova.scheduler.client.report [None 
req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2390.741078] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.146s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2390.805275] env[67008]: DEBUG oslo_concurrency.lockutils [None req-18031fcd-7b5b-46ca-8015-6bf9d1fd00fd tempest-ServerActionsV293TestJSON-145116815 tempest-ServerActionsV293TestJSON-145116815-project-member] Lock "4c046377-d7d1-424a-866e-803354a29a26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.037s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2396.440314] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "3d1969fc-4175-406c-bba7-f9167559689f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2396.441531] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Lock "3d1969fc-4175-406c-bba7-f9167559689f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2396.455430] env[67008]: DEBUG nova.compute.manager [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2396.514725] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2396.515009] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2396.516516] env[67008]: INFO nova.compute.claims [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2396.626386] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40067ef1-faf1-4192-8a43-00e4a857931b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.634016] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fd379e-3285-483c-8ea2-b954a7022fc2 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.664788] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcbf39a-64e1-4432-91a9-536f1c4c2f39 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.672879] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b44a8-3a0a-4086-beda-958797d19643 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.686433] env[67008]: DEBUG nova.compute.provider_tree [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2396.695519] env[67008]: DEBUG nova.scheduler.client.report [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
2396.708888] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.194s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2396.709404] env[67008]: DEBUG nova.compute.manager [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2396.747506] env[67008]: DEBUG nova.compute.utils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2396.753049] env[67008]: DEBUG nova.compute.manager [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2396.753243] env[67008]: DEBUG nova.network.neutron [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2396.768192] env[67008]: DEBUG nova.compute.manager [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Start building block device mappings for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2396.836765] env[67008]: DEBUG nova.policy [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c92dc69658c4c96a72a0b323b93b2b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9fe89e65eee497497b2536d045ab94a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 2396.844780] env[67008]: DEBUG nova.compute.manager [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Start spawning the instance on the hypervisor. 
{{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2396.873658] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2396.873907] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2396.874083] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2396.874330] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2396.874601] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2396.877019] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2396.877019] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2396.877019] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2396.877019] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2396.877019] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2396.877019] env[67008]: DEBUG nova.virt.hardware [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2396.877019] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8e06bc-a31f-4ab4-a771-97e0541d7fc0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.884844] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9b16cf-8861-4324-ac0d-b4b2306eb5a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.049466] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "c13a1eb8-8d09-4da4-9819-4be7b132d6fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2397.049694] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Lock "c13a1eb8-8d09-4da4-9819-4be7b132d6fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2397.061078] env[67008]: DEBUG nova.compute.manager [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Starting instance... 
{{(pid=67008) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2397.131660] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2397.131908] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2397.134776] env[67008]: INFO nova.compute.claims [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2397.153734] env[67008]: DEBUG nova.network.neutron [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Successfully created port: 98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2397.264270] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1025a1d-9d1b-4580-9670-f7278eadd3fb {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.271959] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6185aaef-f713-4244-94a0-163aa4377f0e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.304968] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cb8c33-827a-49f6-91ab-f3ed3f3405a0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.314172] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfd164b-6deb-4971-8b23-e686d66d41e0 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.328564] env[67008]: DEBUG nova.compute.provider_tree [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Inventory has not changed in ProviderTree for provider: ad100a41-192a-4a03-bdd9-0a78ce856705 {{(pid=67008) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2397.337738] env[67008]: DEBUG nova.scheduler.client.report [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Inventory has not changed for provider ad100a41-192a-4a03-bdd9-0a78ce856705 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=67008) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2397.353730] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.222s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2397.356016] env[67008]: DEBUG nova.compute.manager [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Start building networks asynchronously for instance. {{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2397.389243] env[67008]: DEBUG nova.compute.utils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Using /dev/sd instead of None {{(pid=67008) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2397.393209] env[67008]: DEBUG nova.compute.manager [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Allocating IP information in the background. {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2397.393392] env[67008]: DEBUG nova.network.neutron [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] allocate_for_instance() {{(pid=67008) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2397.403363] env[67008]: DEBUG nova.compute.manager [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Start building block device mappings for instance. 
{{(pid=67008) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2397.464074] env[67008]: DEBUG nova.policy [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c92dc69658c4c96a72a0b323b93b2b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9fe89e65eee497497b2536d045ab94a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=67008) authorize /opt/stack/nova/nova/policy.py:203}} [ 2397.474708] env[67008]: DEBUG nova.compute.manager [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Start spawning the instance on the hypervisor. {{(pid=67008) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2397.502021] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T22:09:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T22:09:12Z,direct_url=,disk_format='vmdk',id=ae01aa56-93e6-47e6-accd-8c8a802d92bd,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='26193804e2bc4e6fa9cf7c325c35a944',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T22:09:13Z,virtual_size=,visibility=), allow threads: False {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2397.502295] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Flavor limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2397.502443] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Image limits 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2397.502629] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Flavor pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2397.502775] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Image pref 0:0:0 {{(pid=67008) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2397.502916] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=67008) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2397.503337] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2397.503522] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2397.503688] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Got 1 possible topologies {{(pid=67008) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2397.503846] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2397.504019] env[67008]: DEBUG nova.virt.hardware [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=67008) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2397.504853] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b4fb3d-34da-4b23-8a32-457c45fdbf99 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.512815] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd220c8-89b9-47cc-88b5-786c8520e94c {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2397.770347] env[67008]: DEBUG nova.network.neutron [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Successfully created port: e24b5bee-ddc6-4316-b77b-65f6786c5e9f {{(pid=67008) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2397.848499] env[67008]: DEBUG nova.compute.manager [req-257a2703-6269-450d-8221-59591bd4416a req-3b7b2361-2366-4111-9797-bd05a414da5c service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Received event network-vif-plugged-98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf {{(pid=67008) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11101}} [ 2397.848727] env[67008]: DEBUG oslo_concurrency.lockutils [req-257a2703-6269-450d-8221-59591bd4416a req-3b7b2361-2366-4111-9797-bd05a414da5c service nova] Acquiring lock "3d1969fc-4175-406c-bba7-f9167559689f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2397.848944] env[67008]: DEBUG oslo_concurrency.lockutils [req-257a2703-6269-450d-8221-59591bd4416a req-3b7b2361-2366-4111-9797-bd05a414da5c service nova] Lock "3d1969fc-4175-406c-bba7-f9167559689f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2397.849155] env[67008]: DEBUG oslo_concurrency.lockutils [req-257a2703-6269-450d-8221-59591bd4416a req-3b7b2361-2366-4111-9797-bd05a414da5c service nova] Lock "3d1969fc-4175-406c-bba7-f9167559689f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2397.849326] env[67008]: DEBUG nova.compute.manager [req-257a2703-6269-450d-8221-59591bd4416a req-3b7b2361-2366-4111-9797-bd05a414da5c service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] No waiting events found dispatching network-vif-plugged-98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2397.849492] env[67008]: WARNING nova.compute.manager [req-257a2703-6269-450d-8221-59591bd4416a req-3b7b2361-2366-4111-9797-bd05a414da5c service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Received unexpected event network-vif-plugged-98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf for instance with vm_state building and task_state spawning. 
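
The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ..." lines above reflect Nova's external-event handshake: the compute manager keeps a registry of events that in-flight operations have declared they will wait for, and a Neutron-driven event that arrives before any waiter is registered (as during this early spawn phase) is logged as unexpected and dropped. A self-contained sketch of that pop-or-warn pattern, as a hypothetical simplification rather than nova.compute.manager's real implementation:

    # Sketch of the waiter registry behind pop_instance_event; a
    # hypothetical simplification of nova's InstanceEvents bookkeeping.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}              # (uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare_for_event(self, uuid, name):
            # Called by an operation before triggering the external action.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, name)] = ev
            return ev

        def pop_instance_event(self, uuid, name):
            # Called when the external service reports the event.
            with self._lock:
                ev = self._waiters.pop((uuid, name), None)
            if ev is None:
                print('Received unexpected event %s for instance %s'
                      % (name, uuid))
            else:
                ev.set()                    # wake the waiting operation
            return ev
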
[ 2398.370104] env[67008]: DEBUG nova.network.neutron [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Successfully updated port: 98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2398.380916] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "refresh_cache-3d1969fc-4175-406c-bba7-f9167559689f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2398.381114] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquired lock "refresh_cache-3d1969fc-4175-406c-bba7-f9167559689f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2398.382293] env[67008]: DEBUG nova.network.neutron [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2398.410921] env[67008]: DEBUG nova.compute.manager [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Received event network-changed-98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2398.411144] env[67008]: DEBUG nova.compute.manager [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Refreshing instance network info cache due to event network-changed-98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2398.411355] env[67008]: DEBUG oslo_concurrency.lockutils [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] Acquiring lock "refresh_cache-3d1969fc-4175-406c-bba7-f9167559689f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2398.432420] env[67008]: DEBUG nova.network.neutron [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Instance cache missing network info. 
{{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2398.494209] env[67008]: DEBUG nova.compute.manager [req-1c8e9c49-7fe5-4924-8c7a-db5c14754642 req-5a97dfc9-b473-4608-9019-97c59606fe88 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Received event network-vif-plugged-e24b5bee-ddc6-4316-b77b-65f6786c5e9f {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2398.494440] env[67008]: DEBUG oslo_concurrency.lockutils [req-1c8e9c49-7fe5-4924-8c7a-db5c14754642 req-5a97dfc9-b473-4608-9019-97c59606fe88 service nova] Acquiring lock "c13a1eb8-8d09-4da4-9819-4be7b132d6fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2398.494589] env[67008]: DEBUG oslo_concurrency.lockutils [req-1c8e9c49-7fe5-4924-8c7a-db5c14754642 req-5a97dfc9-b473-4608-9019-97c59606fe88 service nova] Lock "c13a1eb8-8d09-4da4-9819-4be7b132d6fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2398.494722] env[67008]: DEBUG oslo_concurrency.lockutils [req-1c8e9c49-7fe5-4924-8c7a-db5c14754642 req-5a97dfc9-b473-4608-9019-97c59606fe88 service nova] Lock "c13a1eb8-8d09-4da4-9819-4be7b132d6fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2398.494917] env[67008]: DEBUG nova.compute.manager [req-1c8e9c49-7fe5-4924-8c7a-db5c14754642 req-5a97dfc9-b473-4608-9019-97c59606fe88 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] No waiting events found dispatching network-vif-plugged-e24b5bee-ddc6-4316-b77b-65f6786c5e9f {{(pid=67008) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2398.495093] env[67008]: WARNING nova.compute.manager [req-1c8e9c49-7fe5-4924-8c7a-db5c14754642 req-5a97dfc9-b473-4608-9019-97c59606fe88 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Received unexpected event network-vif-plugged-e24b5bee-ddc6-4316-b77b-65f6786c5e9f for instance with vm_state building and task_state spawning. 
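
The recurring "Inventory has not changed for provider ... based on inventory data: {'VCPU': ..., 'MEMORY_MB': ..., 'DISK_GB': ...}" entries in this section are the resource tracker re-asserting the node's inventory to the placement service after each claim or usage update. For each resource class, placement's usable capacity is (total - reserved) * allocation_ratio, with any single allocation further capped by max_unit. A small sketch using the exact values logged above:

    # Effective capacity per resource class, computed from the inventory
    # dict logged by set_inventory_for_provider in this section.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved'])
                       * inv['allocation_ratio'])
        print('%s: capacity=%d, per-allocation cap=%d'
              % (rc, capacity, inv['max_unit']))
    # VCPU: 192 (48 cores oversubscribed 4x), MEMORY_MB: 196078, DISK_GB: 400
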
[ 2398.576523] env[67008]: DEBUG nova.network.neutron [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Successfully updated port: e24b5bee-ddc6-4316-b77b-65f6786c5e9f {{(pid=67008) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2398.585563] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "refresh_cache-c13a1eb8-8d09-4da4-9819-4be7b132d6fd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2398.585711] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquired lock "refresh_cache-c13a1eb8-8d09-4da4-9819-4be7b132d6fd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2398.585857] env[67008]: DEBUG nova.network.neutron [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Building network info cache for instance {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2398.599319] env[67008]: DEBUG nova.network.neutron [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Updating instance_info_cache with network_info: [{"id": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf", "address": "fa:16:3e:88:0b:c9", "network": {"id": "e34c780b-da47-4c37-97bb-e874a59880bd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-343457385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9fe89e65eee497497b2536d045ab94a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98f8fbad-f8", "ovs_interfaceid": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2398.609923] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Releasing lock "refresh_cache-3d1969fc-4175-406c-bba7-f9167559689f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 
2398.610353] env[67008]: DEBUG nova.compute.manager [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Instance network_info: |[{"id": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf", "address": "fa:16:3e:88:0b:c9", "network": {"id": "e34c780b-da47-4c37-97bb-e874a59880bd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-343457385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9fe89e65eee497497b2536d045ab94a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98f8fbad-f8", "ovs_interfaceid": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2398.610662] env[67008]: DEBUG oslo_concurrency.lockutils [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] Acquired lock "refresh_cache-3d1969fc-4175-406c-bba7-f9167559689f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2398.610848] env[67008]: DEBUG nova.network.neutron [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Refreshing network info cache for port 98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2398.611842] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:0b:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2398.620099] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Creating folder: Project (d9fe89e65eee497497b2536d045ab94a). Parent ref: group-v567993. 
{{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2398.623338] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7f1b7be-ab6c-477d-9ca0-27b11d6f6086 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.634954] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Created folder: Project (d9fe89e65eee497497b2536d045ab94a) in parent group-v567993. [ 2398.635163] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Creating folder: Instances. Parent ref: group-v568104. {{(pid=67008) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2398.635965] env[67008]: DEBUG nova.network.neutron [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Instance cache missing network info. {{(pid=67008) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2398.637938] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40ccb405-45e5-4bcf-8135-99fd9e540dec {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.646688] env[67008]: INFO nova.virt.vmwareapi.vm_util [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Created folder: Instances in parent group-v568104. [ 2398.646890] env[67008]: DEBUG oslo.service.loopingcall [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2398.647077] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2398.647311] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fb7d0bf-da40-4a5e-9c1f-b8dde17696d5 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.671671] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2398.671671] env[67008]: value = "task-2825074" [ 2398.671671] env[67008]: _type = "Task" [ 2398.671671] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.681518] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825074, 'name': CreateVM_Task} progress is 0%. 
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.808044] env[67008]: DEBUG nova.network.neutron [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Updating instance_info_cache with network_info: [{"id": "e24b5bee-ddc6-4316-b77b-65f6786c5e9f", "address": "fa:16:3e:58:7a:17", "network": {"id": "e34c780b-da47-4c37-97bb-e874a59880bd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-343457385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9fe89e65eee497497b2536d045ab94a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape24b5bee-dd", "ovs_interfaceid": "e24b5bee-ddc6-4316-b77b-65f6786c5e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2398.821454] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Releasing lock "refresh_cache-c13a1eb8-8d09-4da4-9819-4be7b132d6fd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2398.822331] env[67008]: DEBUG nova.compute.manager [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Instance network_info: |[{"id": "e24b5bee-ddc6-4316-b77b-65f6786c5e9f", "address": "fa:16:3e:58:7a:17", "network": {"id": "e34c780b-da47-4c37-97bb-e874a59880bd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-343457385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9fe89e65eee497497b2536d045ab94a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape24b5bee-dd", "ovs_interfaceid": "e24b5bee-ddc6-4316-b77b-65f6786c5e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=67008) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2398.823463] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:7a:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c621a9c-66f5-426a-8aab-bd8b2e912106', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e24b5bee-ddc6-4316-b77b-65f6786c5e9f', 'vif_model': 'vmxnet3'}] {{(pid=67008) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2398.832424] env[67008]: DEBUG oslo.service.loopingcall [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=67008) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2398.835263] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Creating VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2398.835639] env[67008]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6f4ca0e-ea91-443f-a086-0752dd6f8c76 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2398.856184] env[67008]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2398.856184] env[67008]: value = "task-2825075" [ 2398.856184] env[67008]: _type = "Task" [ 2398.856184] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2398.866993] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825075, 'name': CreateVM_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2398.895954] env[67008]: DEBUG nova.network.neutron [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Updated VIF entry in instance network info cache for port 98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf. 
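The CreateVM_Task above (task-2825075) is driven through oslo.vmware's wait_for_task, which the log shows polling via _poll_task until the task reports completion. A minimal sketch of that poll loop, assuming a VMwareAPISession-like session object; the TaskInfo field names (state, progress, error, result) come from the vSphere API, and this mirrors the behaviour visible in the log rather than oslo.vmware's actual implementation:

    import time

    from oslo_vmware import vim_util

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a vSphere task until it leaves the queued/running states."""
        while True:
            # Fetch the task's TaskInfo property, much as the property
            # collector calls in the log do.
            info = session.invoke_api(vim_util, 'get_object_property',
                                      session.vim, task_ref, 'info')
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error.localizedMessage)
            # Corresponds to the "... progress is 0%" lines emitted while
            # the task is still queued or running.
            print('Task %s progress is %s%%' % (info.key, info.progress or 0))
            time.sleep(interval)
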
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2398.895954] env[67008]: DEBUG nova.network.neutron [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Updating instance_info_cache with network_info: [{"id": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf", "address": "fa:16:3e:88:0b:c9", "network": {"id": "e34c780b-da47-4c37-97bb-e874a59880bd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-343457385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9fe89e65eee497497b2536d045ab94a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98f8fbad-f8", "ovs_interfaceid": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2398.904563] env[67008]: DEBUG oslo_concurrency.lockutils [req-0438b53a-7903-4cae-9858-0fbd9b55d90a req-808879b6-0f59-4cc5-b603-4b9a67f06067 service nova] Releasing lock "refresh_cache-3d1969fc-4175-406c-bba7-f9167559689f" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2399.184676] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825074, 'name': CreateVM_Task, 'duration_secs': 0.291987} completed successfully. 
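The instance_info_cache payloads in these records are JSON-serializable lists of VIF dictionaries. As a sketch of their shape, the abridged entry below (taken from port 98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf above, with most fields dropped) can be loaded and queried with the standard library alone:

    import json

    # Abridged VIF entry from the cache update above; most fields omitted.
    vif = json.loads("""
    {"id": "98f8fbad-f82c-4cc4-b7fb-38e8ac6179cf",
     "address": "fa:16:3e:88:0b:c9",
     "network": {"bridge": "br-int",
                 "subnets": [{"cidr": "192.168.128.0/28",
                              "ips": [{"address": "192.168.128.11",
                                       "type": "fixed", "version": 4}]}],
                 "meta": {"mtu": 8950}},
     "type": "ovs",
     "devname": "tap98f8fbad-f8",
     "active": true}
    """)

    # Pull out the fixed IPs, MAC and MTU the way nova's network model does.
    fixed_ips = [ip['address']
                 for subnet in vif['network']['subnets']
                 for ip in subnet['ips'] if ip['type'] == 'fixed']
    print(vif['address'], fixed_ips, vif['network']['meta']['mtu'])
    # fa:16:3e:88:0b:c9 ['192.168.128.11'] 8950
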
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2399.184676] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2399.184676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2399.184676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2399.184676] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2399.184676] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8cb045b-9bef-42c2-aff2-3a48b622c29e {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.189028] env[67008]: DEBUG oslo_vmware.api [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Waiting for the task: (returnval){ [ 2399.189028] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e453e1-8b45-1062-5283-07d01cf4bc02" [ 2399.189028] env[67008]: _type = "Task" [ 2399.189028] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2399.194421] env[67008]: DEBUG oslo_vmware.api [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Task: {'id': session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]52e453e1-8b45-1062-5283-07d01cf4bc02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2399.366151] env[67008]: DEBUG oslo_vmware.api [-] Task: {'id': task-2825075, 'name': CreateVM_Task, 'duration_secs': 0.311341} completed successfully. 
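The lock and external-semaphore sequence around "[datastore1] devstack-image-cache_base/ae01aa56-..." is nova probing whether the image is already cached on the datastore before deciding to fetch it (_fetch_image_if_missing). A sketch of the pattern, where search_datastore is a hypothetical stand-in for the HostDatastoreBrowser.SearchDatastore_Task round trip shown above:

    from oslo_concurrency import lockutils

    def search_datastore(session, folder, pattern):
        # Hypothetical helper: a real implementation builds a search spec,
        # invokes HostDatastoreBrowser.SearchDatastore_Task and waits on
        # the session-keyed task it returns.
        raise NotImplementedError

    def image_cached(session, datastore, image_id):
        # Serialize on the per-image cache path, as the Acquiring/Acquired/
        # Releasing lines do, so only one spawn request probes or populates
        # a given cache entry at a time.
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name):
            result = search_datastore(
                session,
                folder='[%s] devstack-image-cache_base/%s' % (datastore,
                                                              image_id),
                pattern='%s.vmdk' % image_id)
            return bool(getattr(result, 'file', None))

The effect of that serialization is visible just below: the second spawn request (req-59f34ef0) only logs "Acquired" for the same cache lock after the first (req-ae6b7d6c) logs "Releasing".
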
{{(pid=67008) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2399.366626] env[67008]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Created VM on the ESX host {{(pid=67008) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2399.367477] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2399.700019] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2399.700019] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: 3d1969fc-4175-406c-bba7-f9167559689f] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2399.700019] env[67008]: DEBUG oslo_concurrency.lockutils [None req-ae6b7d6c-7287-4114-a0b0-58f8d71fc164 tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2399.700019] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2399.700019] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2399.700019] env[67008]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4029d4e4-4739-4ee3-b9f1-92af5a67a7b9 {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2399.703361] env[67008]: DEBUG oslo_vmware.api [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Waiting for the task: (returnval){ [ 2399.703361] env[67008]: value = "session[5211c85d-0289-34bb-296c-df7cfa3b4b7b]522f637b-94ad-c784-6795-a31a4a8f9212" [ 2399.703361] env[67008]: _type = "Task" [ 
2399.703361] env[67008]: } to complete. {{(pid=67008) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2399.717036] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2399.717423] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Processing image ae01aa56-93e6-47e6-accd-8c8a802d92bd {{(pid=67008) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2399.717772] env[67008]: DEBUG oslo_concurrency.lockutils [None req-59f34ef0-da9c-48b5-a836-097bd7c3b26f tempest-ServerRescueNegativeTestJSON-1559270937 tempest-ServerRescueNegativeTestJSON-1559270937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ae01aa56-93e6-47e6-accd-8c8a802d92bd/ae01aa56-93e6-47e6-accd-8c8a802d92bd.vmdk" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2400.519515] env[67008]: DEBUG nova.compute.manager [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Received event network-changed-e24b5bee-ddc6-4316-b77b-65f6786c5e9f {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2400.519891] env[67008]: DEBUG nova.compute.manager [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Refreshing instance network info cache due to event network-changed-e24b5bee-ddc6-4316-b77b-65f6786c5e9f. {{(pid=67008) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2400.520247] env[67008]: DEBUG oslo_concurrency.lockutils [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] Acquiring lock "refresh_cache-c13a1eb8-8d09-4da4-9819-4be7b132d6fd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2400.520553] env[67008]: DEBUG oslo_concurrency.lockutils [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] Acquired lock "refresh_cache-c13a1eb8-8d09-4da4-9819-4be7b132d6fd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2400.520847] env[67008]: DEBUG nova.network.neutron [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Refreshing network info cache for port e24b5bee-ddc6-4316-b77b-65f6786c5e9f {{(pid=67008) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2400.751574] env[67008]: DEBUG nova.network.neutron [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Updated VIF entry in instance network info cache for port e24b5bee-ddc6-4316-b77b-65f6786c5e9f. 
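The network-changed-e24b5bee-... records show nova's external-event path: Neutron notifies the compute manager, which refreshes the affected port's entry under the instance's refresh_cache lock (the Acquiring/Acquired/Releasing triplet above). A condensed sketch, where refresh_port_in_cache is a hypothetical helper; real nova events carry name/tag pairs, with the port UUID as the tag:

    from oslo_concurrency import lockutils

    def refresh_port_in_cache(instance_uuid, port_id):
        # Hypothetical: re-query Neutron for port_id and rewrite that VIF's
        # entry in the instance network info cache.
        pass

    def external_instance_event(events):
        for event in events:
            if event['name'] != 'network-changed':
                continue
            # Same per-instance lock name as the refresh_cache-<uuid>
            # messages above.
            with lockutils.lock('refresh_cache-%s' % event['instance_uuid']):
                refresh_port_in_cache(event['instance_uuid'], event['tag'])
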
{{(pid=67008) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2400.751574] env[67008]: DEBUG nova.network.neutron [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] [instance: c13a1eb8-8d09-4da4-9819-4be7b132d6fd] Updating instance_info_cache with network_info: [{"id": "e24b5bee-ddc6-4316-b77b-65f6786c5e9f", "address": "fa:16:3e:58:7a:17", "network": {"id": "e34c780b-da47-4c37-97bb-e874a59880bd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-343457385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d9fe89e65eee497497b2536d045ab94a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c621a9c-66f5-426a-8aab-bd8b2e912106", "external-id": "nsx-vlan-transportzone-485", "segmentation_id": 485, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape24b5bee-dd", "ovs_interfaceid": "e24b5bee-ddc6-4316-b77b-65f6786c5e9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=67008) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2400.759649] env[67008]: DEBUG oslo_concurrency.lockutils [req-c8671e8d-993b-462d-b03e-db0d5f3a0186 req-92be232b-32e5-443f-8420-a77f447408b3 service nova] Releasing lock "refresh_cache-c13a1eb8-8d09-4da4-9819-4be7b132d6fd" {{(pid=67008) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2402.991572] env[67008]: DEBUG oslo_service.periodic_task [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Running periodic task ComputeManager._sync_power_states {{(pid=67008) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.008230] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Getting list of instances from cluster (obj){ [ 2403.008230] env[67008]: value = "domain-c8" [ 2403.008230] env[67008]: _type = "ClusterComputeResource" [ 2403.008230] env[67008]: } {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2403.009606] env[67008]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aec97de-e8df-437e-904b-3e6ac7d8985b {{(pid=67008) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.022395] env[67008]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Got total of 4 instances {{(pid=67008) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2403.022581] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9 {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2403.022774] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 029e6a15-2f1d-42bf-b5ef-286e82ba7c0e 
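ComputeManager._sync_power_states above runs as an oslo.service periodic task: it enumerates the instances in the vCenter cluster (four here) and triggers a per-UUID sync. A minimal sketch of the wiring; the spacing value is illustrative rather than nova's configured interval, and self.driver.list_instance_uuids stands in for the vmwareapi list_instances query above:

    from oslo_service import periodic_task

    class ComputeManager(periodic_task.PeriodicTasks):

        @periodic_task.periodic_task(spacing=600)
        def _sync_power_states(self, context):
            # Enumerate instances from the hypervisor (the
            # ClusterComputeResource property query above), then reconcile
            # each one's power state against the database record.
            for uuid in self.driver.list_instance_uuids():
                self._query_driver_power_state_and_sync(context, uuid)
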
{{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2403.022932] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid 3d1969fc-4175-406c-bba7-f9167559689f {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2403.023099] env[67008]: DEBUG nova.compute.manager [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Triggering sync for uuid c13a1eb8-8d09-4da4-9819-4be7b132d6fd {{(pid=67008) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 2403.023408] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "bf8e3bfc-3a49-4d80-86b9-a2befbf1b6a9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2403.023677] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "029e6a15-2f1d-42bf-b5ef-286e82ba7c0e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2403.023886] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "3d1969fc-4175-406c-bba7-f9167559689f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2403.024097] env[67008]: DEBUG oslo_concurrency.lockutils [None req-2b8d20c0-295e-4bf0-a192-61218daa7c52 None None] Acquiring lock "c13a1eb8-8d09-4da4-9819-4be7b132d6fd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=67008) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
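The closing Acquiring lock "<uuid>" lines reveal the shape of that sync: each instance is handled under a lock named by its UUID, through nested helpers whose names (_sync, query_driver_power_state_and_sync) are embedded in the lock-holder strings. A sketch of that nesting, with the reconcile step left as a hypothetical no-op:

    from oslo_concurrency import lockutils

    def _sync_power_states(context, uuids):
        # Nesting mirrors the holder string in the log:
        # _sync_power_states -> _sync -> query_driver_power_state_and_sync.
        def _sync(uuid):
            @lockutils.synchronized(uuid)
            def query_driver_power_state_and_sync():
                # Hypothetical reconcile: compare the driver's power state
                # for this instance with the DB record and repair any drift.
                pass
            query_driver_power_state_and_sync()
        for uuid in uuids:
            _sync(uuid)
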